diff --git beeline/pom.xml beeline/pom.xml index 4567d5e09b..19ec53eba6 100644 --- beeline/pom.xml +++ beeline/pom.xml @@ -55,6 +55,11 @@ hive-jdbc ${project.version} + + org.apache.hive + hive-standalone-metastore-server + ${project.version} + commons-cli diff --git beeline/src/java/org/apache/hive/beeline/BeeLine.java beeline/src/java/org/apache/hive/beeline/BeeLine.java index 4eda8e3ff5..29ec2de68c 100644 --- beeline/src/java/org/apache/hive/beeline/BeeLine.java +++ beeline/src/java/org/apache/hive/beeline/BeeLine.java @@ -1026,12 +1026,15 @@ public void updateOptsForCli() { getOpts().setNullEmptyString(true); } + public int begin(String[] args, InputStream inputStream) throws IOException { + return begin(args, inputStream, true); + } /** * Start accepting input from stdin, and dispatch it * to the appropriate {@link CommandHandler} until the * global variable exit is true. */ - public int begin(String[] args, InputStream inputStream) throws IOException { + public int begin(String[] args, InputStream inputStream, boolean keepHistory) throws IOException { try { // load the options first, so we can override on the command line getOpts().load(); @@ -1039,7 +1042,9 @@ public int begin(String[] args, InputStream inputStream) throws IOException { // nothing } - setupHistory(); + if (keepHistory) { + setupHistory(); + } //add shutdown hook to cleanup the beeline for smooth exit addBeelineShutdownHook(); @@ -1341,7 +1346,11 @@ public ConsoleReader initializeConsoleReader(InputStream inputStream) throws IOE try { // now set the output for the history - consoleReader.setHistory(this.history); + if (this.history != null) { + consoleReader.setHistory(this.history); + } else { + consoleReader.setHistoryEnabled(false); + } } catch (Exception e) { handleException(e); } diff --git beeline/src/java/org/apache/hive/beeline/schematool/HiveSchemaTool.java beeline/src/java/org/apache/hive/beeline/schematool/HiveSchemaTool.java index 69514e51b7..c7234760e7 100644 --- beeline/src/java/org/apache/hive/beeline/schematool/HiveSchemaTool.java +++ beeline/src/java/org/apache/hive/beeline/schematool/HiveSchemaTool.java @@ -100,7 +100,7 @@ protected void execSql(String sqlScriptFile) throws IOException { // we always add a line separator at the end while calling dbCommandParser.buildCommand. 
beeLine.getOpts().setEntireLineAsCommand(true); LOG.debug("Going to run command <" + builder.buildToLog() + ">"); - int status = beeLine.begin(builder.buildToRun(), null); + int status = beeLine.begin(builder.buildToRun(), null, false); if (status != 0) { throw new IOException("Schema script failed, errorcode " + status); } diff --git bin/hive bin/hive index 1ade51eebd..a7ae2f571e 100755 --- bin/hive +++ bin/hive @@ -356,6 +356,7 @@ fi # include the log4j jar that is used for hive into the classpath CLASSPATH="${CLASSPATH}:${LOG_JAR_CLASSPATH}" export HADOOP_CLASSPATH="${HADOOP_CLASSPATH}:${LOG_JAR_CLASSPATH}" +export JVM_PID="$$" if [ "$TORUN" = "" ] ; then echo "Service $SERVICE not found" diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index aa58d7445c..3e38fe66c5 100644 --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -1679,7 +1679,15 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal "joins unnecessary memory will be allocated and then trimmed."), HIVEHYBRIDGRACEHASHJOINBLOOMFILTER("hive.mapjoin.hybridgrace.bloomfilter", true, "Whether to " + "use BloomFilter in Hybrid grace hash join to minimize unnecessary spilling."), - + HIVEMAPJOINFULLOUTER("hive.mapjoin.full.outer", true, + "Whether to use MapJoin for FULL OUTER JOINs."), + HIVE_TEST_MAPJOINFULLOUTER_OVERRIDE( + "hive.test.mapjoin.full.outer.override", + "none", new StringSet("none", "enable", "disable"), + "internal use only, used to override the hive.mapjoin.full.outer\n" + + "setting. Using enable will force it on and disable will force it off.\n" + + "The default none is do nothing, of course", + true), HIVESMBJOINCACHEROWS("hive.smbjoin.cache.rows", 10000, "How many rows with the same key value should be cached in memory per smb joined table."), HIVEGROUPBYMAPINTERVAL("hive.groupby.mapaggr.checkinterval", 100000, @@ -1701,6 +1709,8 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal "If the bucketing/sorting properties of the table exactly match the grouping key, whether to perform \n" + "the group by in the mapper by using BucketizedHiveInputFormat. The only downside to this\n" + "is that it limits the number of mappers to the number of files."), + HIVE_DEFAULT_NULLS_LAST("hive.default.nulls.last", true, + "Whether to set NULLS LAST as the default null ordering"), HIVE_GROUPBY_POSITION_ALIAS("hive.groupby.position.alias", false, "Whether to enable using Column Position Alias in Group By"), HIVE_ORDERBY_POSITION_ALIAS("hive.orderby.position.alias", true, @@ -2035,7 +2045,7 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal "However, if it is on, and the predicted size of the larger input for a given join is greater \n" + "than this number, the join will not be converted to a dynamically partitioned hash join. \n" + "The value \"-1\" means no limit."), - HIVEHASHTABLEKEYCOUNTADJUSTMENT("hive.hashtable.key.count.adjustment", 2.0f, + HIVEHASHTABLEKEYCOUNTADJUSTMENT("hive.hashtable.key.count.adjustment", 0.99f, "Adjustment to mapjoin hashtable size derived from table and column statistics; the estimate" + " of the number of keys is divided by this value. 
If the value is 0, statistics are not used" + "and hive.hashtable.initialCapacity is used instead."), diff --git common/src/java/org/apache/hive/common/util/ProcessUtils.java common/src/java/org/apache/hive/common/util/ProcessUtils.java new file mode 100644 index 0000000000..409384fc00 --- /dev/null +++ common/src/java/org/apache/hive/common/util/ProcessUtils.java @@ -0,0 +1,66 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hive.common.util; + +import java.io.IOException; +import java.lang.management.ManagementFactory; +import java.util.List; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Process related utilities. + */ +public class ProcessUtils { + private static Logger LOG = LoggerFactory.getLogger(ProcessUtils.class); + + public static Integer getPid() { + // JVM_PID is exported by bin/hive + String pidStr = System.getenv("JVM_PID"); + + // in case if it is not set correctly used fallback from mxbean which is implementation specific + if (pidStr == null || pidStr.trim().isEmpty()) { + String name = ManagementFactory.getRuntimeMXBean().getName(); + if (name != null) { + int idx = name.indexOf("@"); + if (idx != -1) { + pidStr = name.substring(0, name.indexOf("@")); + } + } + } + try { + if (pidStr != null) { + return Integer.valueOf(pidStr); + } + } catch (NumberFormatException nfe) { + // ignore + } + return null; + } + + public static Process runCmdAsync(List cmd) { + try { + LOG.info("Running command async: " + cmd); + return new ProcessBuilder(cmd).inheritIO().start(); + } catch (IOException ex) { + throw new IllegalStateException(ex); + } + } +} diff --git common/src/java/org/apache/hive/http/HttpServer.java common/src/java/org/apache/hive/http/HttpServer.java index 3cb7a33c4e..24c5422a18 100644 --- common/src/java/org/apache/hive/http/HttpServer.java +++ common/src/java/org/apache/hive/http/HttpServer.java @@ -21,6 +21,9 @@ import java.io.FileNotFoundException; import java.io.IOException; import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; @@ -558,6 +561,22 @@ private void initializeWebServer(final Builder b, int queueSize) throws IOExcept addServlet("conf", "/conf", ConfServlet.class); addServlet("stacks", "/stacks", StackServlet.class); addServlet("conflog", "/conflog", Log4j2ConfiguratorServlet.class); + final String asyncProfilerHome = ProfileServlet.getAsyncProfilerHome(); + if (asyncProfilerHome != null && !asyncProfilerHome.trim().isEmpty()) { + addServlet("prof", "/prof", ProfileServlet.class); + Path tmpDir = Paths.get(ProfileServlet.OUTPUT_DIR); + if (Files.notExists(tmpDir)) { + Files.createDirectories(tmpDir); + } + 
ServletContextHandler genCtx = + new ServletContextHandler(contexts, "/prof-output"); + setContextAttributes(genCtx.getServletContext(), b.contextAttrs); + genCtx.addServlet(ProfileOutputServlet.class, "/*"); + genCtx.setResourceBase(tmpDir.toAbsolutePath().toString()); + genCtx.setDisplayName("prof-output"); + } else { + LOG.info("ASYNC_PROFILER_HOME env or -Dasync.profiler.home not specified. Disabling /prof endpoint.."); + } for (Pair> p : b.servlets) { addServlet(p.getFirst(), "/" + p.getFirst(), p.getSecond()); diff --git common/src/java/org/apache/hive/http/ProfileOutputServlet.java common/src/java/org/apache/hive/http/ProfileOutputServlet.java new file mode 100644 index 0000000000..fdca1f3cb3 --- /dev/null +++ common/src/java/org/apache/hive/http/ProfileOutputServlet.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hive.http; + +import java.io.File; +import java.io.IOException; + +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.eclipse.jetty.servlet.DefaultServlet; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Servlet to serve files generated by {@link ProfileServlet} + */ +public class ProfileOutputServlet extends DefaultServlet { + private static final long serialVersionUID = 1L; + private static final Logger LOG = LoggerFactory.getLogger(ProfileOutputServlet.class); + + @Override + protected void doGet(final HttpServletRequest req, final HttpServletResponse resp) + throws ServletException, IOException { + String absoluteDiskPath = getServletContext().getRealPath(req.getPathInfo()); + File requestedFile = new File(absoluteDiskPath); + // async-profiler version 1.4 writes 'Started [cpu] profiling' to output file when profiler is running which + // gets replaced by final output. If final output is not ready yet, the file size will be <100 bytes (in all modes). + if (requestedFile.length() < 100) { + LOG.info("{} is incomplete. Sending auto-refresh header..", requestedFile); + resp.setHeader("Refresh", "2," + req.getRequestURI()); + resp.getWriter().write("This page will auto-refresh every 2 second until output file is ready.."); + } else { + super.doGet(req, resp); + } + } +} \ No newline at end of file diff --git common/src/java/org/apache/hive/http/ProfileServlet.java common/src/java/org/apache/hive/http/ProfileServlet.java new file mode 100644 index 0000000000..48437563b4 --- /dev/null +++ common/src/java/org/apache/hive/http/ProfileServlet.java @@ -0,0 +1,359 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hive.http; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; + +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.hive.common.util.ProcessUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.base.Joiner; + +/** + * Servlet that runs async-profiler as web-endpoint. + * Following options from async-profiler can be specified as query paramater. + * // -e event profiling event: cpu|alloc|lock|cache-misses etc. + * // -d duration run profiling for seconds (integer) + * // -i interval sampling interval in nanoseconds (long) + * // -j jstackdepth maximum Java stack depth (integer) + * // -b bufsize frame buffer size (long) + * // -t profile different threads separately + * // -s simple class names instead of FQN + * // -o fmt[,fmt...] 
output format: summary|traces|flat|collapsed|svg|tree|jfr + * // --width px SVG width pixels (integer) + * // --height px SVG frame height pixels (integer) + * // --minwidth px skip frames smaller than px (double) + * // --reverse generate stack-reversed FlameGraph / Call tree + * Example: + * - To collect 30 second CPU profile of current process (returns FlameGraph svg) + * curl "http://localhost:10002/prof" + * - To collect 1 minute CPU profile of current process and output in tree format (html) + * curl "http://localhost:10002/prof?output=tree&duration=60" + * - To collect 30 second heap allocation profile of current process (returns FlameGraph svg) + * curl "http://localhost:10002/prof?event=alloc" + * - To collect lock contention profile of current process (returns FlameGraph svg) + * curl "http://localhost:10002/prof?event=lock" + * Following event types are supported (default is 'cpu') (NOTE: not all OS'es support all events) + * // Perf events: + * // cpu + * // page-faults + * // context-switches + * // cycles + * // instructions + * // cache-references + * // cache-misses + * // branches + * // branch-misses + * // bus-cycles + * // L1-dcache-load-misses + * // LLC-load-misses + * // dTLB-load-misses + * // mem:breakpoint + * // trace:tracepoint + * // Java events: + * // alloc + * // lock + */ +public class ProfileServlet extends HttpServlet { + private static final long serialVersionUID = 1L; + private static final Logger LOG = LoggerFactory.getLogger(ProfileServlet.class); + private static final String ACCESS_CONTROL_ALLOW_METHODS = "Access-Control-Allow-Methods"; + private static final String ALLOWED_METHODS = "GET"; + private static final String ACCESS_CONTROL_ALLOW_ORIGIN = "Access-Control-Allow-Origin"; + private static final String CONTENT_TYPE_TEXT = "text/plain; charset=utf-8"; + private static final String ASYNC_PROFILER_HOME_ENV = "ASYNC_PROFILER_HOME"; + private static final String ASYNC_PROFILER_HOME_SYSTEM_PROPERTY = "async.profiler.home"; + private static final String PROFILER_SCRIPT = "/profiler.sh"; + private static final int DEFAULT_DURATION_SECONDS = 10; + private static final AtomicInteger ID_GEN = new AtomicInteger(0); + static final String OUTPUT_DIR = System.getProperty("java.io.tmpdir") + "/prof-output"; + + enum Event { + CPU("cpu"), + ALLOC("alloc"), + LOCK("lock"), + PAGE_FAULTS("page-faults"), + CONTEXT_SWITCHES("context-switches"), + CYCLES("cycles"), + INSTRUCTIONS("instructions"), + CACHE_REFERENCES("cache-references"), + CACHE_MISSES("cache-misses"), + BRANCHES("branches"), + BRANCH_MISSES("branch-misses"), + BUS_CYCLES("bus-cycles"), + L1_DCACHE_LOAD_MISSES("L1-dcache-load-misses"), + LLC_LOAD_MISSES("LLC-load-misses"), + DTLB_LOAD_MISSES("dTLB-load-misses"), + MEM_BREAKPOINT("mem:breakpoint"), + TRACE_TRACEPOINT("trace:tracepoint"),; + + private String internalName; + + Event(final String internalName) { + this.internalName = internalName; + } + + public String getInternalName() { + return internalName; + } + + public static Event fromInternalName(final String name) { + for (Event event : values()) { + if (event.getInternalName().equalsIgnoreCase(name)) { + return event; + } + } + + return null; + } + } + + enum Output { + SUMMARY, + TRACES, + FLAT, + COLLAPSED, + SVG, + TREE, + JFR + } + + private Lock profilerLock = new ReentrantLock(); + private Integer pid; + private String asyncProfilerHome; + private Process process; + + public ProfileServlet() { + this.asyncProfilerHome = getAsyncProfilerHome(); + this.pid = ProcessUtils.getPid(); + 
LOG.info("Servlet process PID: {} asyncProfilerHome: {}", pid, asyncProfilerHome); + } + + @Override + protected void doGet(final HttpServletRequest req, final HttpServletResponse resp) throws IOException { + if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(), req, resp)) { + resp.setStatus(HttpServletResponse.SC_UNAUTHORIZED); + setResponseHeader(resp); + resp.getWriter().write("Unauthorized: Instrumentation access is not allowed!"); + return; + } + + // make sure async profiler home is set + if (asyncProfilerHome == null || asyncProfilerHome.trim().isEmpty()) { + resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); + setResponseHeader(resp); + resp.getWriter().write("ASYNC_PROFILER_HOME env is not set."); + return; + } + + // if pid is explicitly specified, use it else default to current process + pid = getInteger(req, "pid", pid); + + // if pid is not specified in query param and if current process pid cannot be determined + if (pid == null) { + resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); + setResponseHeader(resp); + resp.getWriter().write("'pid' query parameter unspecified or unable to determine PID of current process."); + return; + } + + final int duration = getInteger(req, "duration", DEFAULT_DURATION_SECONDS); + final Output output = getOutput(req); + final Event event = getEvent(req); + final Long interval = getLong(req, "interval"); + final Integer jstackDepth = getInteger(req, "jstackdepth", null); + final Long bufsize = getLong(req, "bufsize"); + final boolean thread = req.getParameterMap().containsKey("thread"); + final boolean simple = req.getParameterMap().containsKey("simple"); + final Integer width = getInteger(req, "width", null); + final Integer height = getInteger(req, "height", null); + final Double minwidth = getMinWidth(req); + final boolean reverse = req.getParameterMap().containsKey("reverse"); + + if (process == null || !process.isAlive()) { + try { + int lockTimeoutSecs = 3; + if (profilerLock.tryLock(lockTimeoutSecs, TimeUnit.SECONDS)) { + try { + File outputFile = new File(OUTPUT_DIR, "async-prof-pid-" + pid + "-" + + event.name().toLowerCase() + "-" + ID_GEN.incrementAndGet() + "." + + output.name().toLowerCase()); + List cmd = new ArrayList<>(); + cmd.add(asyncProfilerHome + PROFILER_SCRIPT); + cmd.add("-e"); + cmd.add(event.getInternalName()); + cmd.add("-d"); + cmd.add("" + duration); + cmd.add("-o"); + cmd.add(output.name().toLowerCase()); + cmd.add("-f"); + cmd.add(outputFile.getAbsolutePath()); + if (interval != null) { + cmd.add("-i"); + cmd.add(interval.toString()); + } + if (jstackDepth != null) { + cmd.add("-j"); + cmd.add(jstackDepth.toString()); + } + if (bufsize != null) { + cmd.add("-b"); + cmd.add(bufsize.toString()); + } + if (thread) { + cmd.add("-t"); + } + if (simple) { + cmd.add("-s"); + } + if (width != null) { + cmd.add("--width"); + cmd.add(width.toString()); + } + if (height != null) { + cmd.add("--height"); + cmd.add(height.toString()); + } + if (minwidth != null) { + cmd.add("--minwidth"); + cmd.add(minwidth.toString()); + } + if (reverse) { + cmd.add("--reverse"); + } + cmd.add(pid.toString()); + process = ProcessUtils.runCmdAsync(cmd); + + // set response and set refresh header to output location + setResponseHeader(resp); + resp.setStatus(HttpServletResponse.SC_ACCEPTED); + String relativeUrl = "/prof-output/" + outputFile.getName(); + resp.getWriter().write( + "Started [" + event.getInternalName() + "] profiling. 
This page will automatically redirect to " + + relativeUrl + " after " + duration + " seconds.\n\ncommand:\n" + Joiner.on(" ").join(cmd)); + + // to avoid auto-refresh by ProfileOutputServlet, refreshDelay can be specified via url param + int refreshDelay = getInteger(req, "refreshDelay", 0); + + // instead of sending redirect, set auto-refresh so that browsers will refresh with redirected url + resp.setHeader("Refresh", (duration + refreshDelay) + ";" + relativeUrl); + resp.getWriter().flush(); + } finally { + profilerLock.unlock(); + } + } else { + setResponseHeader(resp); + resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); + resp.getWriter().write("Unable to acquire lock. Another instance of profiler might be running."); + LOG.warn("Unable to acquire lock in {} seconds. Another instance of profiler might be running.", + lockTimeoutSecs); + } + } catch (InterruptedException e) { + LOG.warn("Interrupted while acquiring profile lock.", e); + resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); + } + } else { + setResponseHeader(resp); + resp.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); + resp.getWriter().write("Another instance of profiler is already running."); + } + } + + private Integer getInteger(final HttpServletRequest req, final String param, final Integer defaultValue) { + final String value = req.getParameter(param); + if (value != null) { + try { + return Integer.valueOf(value); + } catch (NumberFormatException e) { + return defaultValue; + } + } + return defaultValue; + } + + private Long getLong(final HttpServletRequest req, final String param) { + final String value = req.getParameter(param); + if (value != null) { + try { + return Long.valueOf(value); + } catch (NumberFormatException e) { + return null; + } + } + return null; + } + + private Double getMinWidth(final HttpServletRequest req) { + final String value = req.getParameter("minwidth"); + if (value != null) { + try { + return Double.valueOf(value); + } catch (NumberFormatException e) { + return null; + } + } + return null; + } + + private Event getEvent(final HttpServletRequest req) { + final String eventArg = req.getParameter("event"); + if (eventArg != null) { + Event event = Event.fromInternalName(eventArg); + return event == null ? 
Event.CPU : event; + } + return Event.CPU; + } + + private Output getOutput(final HttpServletRequest req) { + final String outputArg = req.getParameter("output"); + if (req.getParameter("output") != null) { + try { + return Output.valueOf(outputArg.trim().toUpperCase()); + } catch (IllegalArgumentException e) { + return Output.SVG; + } + } + return Output.SVG; + } + + private void setResponseHeader(final HttpServletResponse response) { + response.setHeader(ACCESS_CONTROL_ALLOW_METHODS, ALLOWED_METHODS); + response.setHeader(ACCESS_CONTROL_ALLOW_ORIGIN, "*"); + response.setContentType(CONTENT_TYPE_TEXT); + } + + static String getAsyncProfilerHome() { + String asyncProfilerHome = System.getenv(ASYNC_PROFILER_HOME_ENV); + // if ENV is not set, see if -Dasync.profiler.home=/path/to/async/profiler/home is set + if (asyncProfilerHome == null || asyncProfilerHome.trim().isEmpty()) { + asyncProfilerHome = System.getProperty(ASYNC_PROFILER_HOME_SYSTEM_PROPERTY); + } + + return asyncProfilerHome; + } +} diff --git data/files/fullouter_long_big_1a.txt data/files/fullouter_long_big_1a.txt new file mode 100644 index 0000000000..8cf831fb4b --- /dev/null +++ data/files/fullouter_long_big_1a.txt @@ -0,0 +1,11 @@ +-5310365297525168078 +-6187919478609154811 +968819023021777205 +3313583664488247651 +-5206670856103795573 +\N +-6187919478609154811 +1569543799237464101 +-6187919478609154811 +-8460550397108077433 +-6187919478609154811 diff --git data/files/fullouter_long_big_1a_nonull.txt data/files/fullouter_long_big_1a_nonull.txt new file mode 100644 index 0000000000..b2325adefb --- /dev/null +++ data/files/fullouter_long_big_1a_nonull.txt @@ -0,0 +1,10 @@ +1569543799237464101 +-6187919478609154811 +968819023021777205 +-8460550397108077433 +-6187919478609154811 +-5310365297525168078 +-6187919478609154811 +-5206670856103795573 +3313583664488247651 +-6187919478609154811 diff --git data/files/fullouter_long_big_1b.txt data/files/fullouter_long_big_1b.txt new file mode 100644 index 0000000000..87c2b3c42b --- /dev/null +++ data/files/fullouter_long_big_1b.txt @@ -0,0 +1,13 @@ +\N +31713 +31713 +31713 +31713 +32030 +31713 +-25394 +31713 +31713 +31713 +31713 +31713 diff --git data/files/fullouter_long_big_1c.txt data/files/fullouter_long_big_1c.txt new file mode 100644 index 0000000000..2d13c260c9 --- /dev/null +++ data/files/fullouter_long_big_1c.txt @@ -0,0 +1,11 @@ +1928928239,\N +-1437463633,YYXPPCH +-1437463633,TKTKGVGFW +1725068083,MKSCCE +1928928239,\N +\N,ABBZ +1928928239,AMKTIWQ +-1437463633,JU +1928928239,VAQHVRI +-1437463633,SOWDWMS +-1437463633,\N diff --git data/files/fullouter_long_big_1d.txt data/files/fullouter_long_big_1d.txt new file mode 100644 index 0000000000..4137f67c6e --- /dev/null +++ data/files/fullouter_long_big_1d.txt @@ -0,0 +1,12 @@ +-702028721 +-702028721 +-1780951928 +-670834064 +-814597051 +\N +-814597051 +-814597051 +-702028721 +-2038654700 +\N +-814597051 diff --git data/files/fullouter_long_small_1a.txt data/files/fullouter_long_small_1a.txt new file mode 100644 index 0000000000..45d582508b --- /dev/null +++ data/files/fullouter_long_small_1a.txt @@ -0,0 +1,54 @@ +-1339636982994067311,2000-06-20 +-2575185053386712613,2105-01-21 +\N,2098-02-10 +-6784441713807772877,1845-02-16 +\N,2024-01-23 +-4224290881682877258,2185-07-08 +-614848861623872247,2101-05-25 +-2098090254092150988,2163-05-26 +434940853096155515,2275-02-08 +3873405809071478736,2034-06-09 +-2184423060953067642,1880-10-06 +7297177530102477725,1921-05-11 +7937120928560087303,2083-03-14 +\N,2242-02-08 
+-2688622006344936758,2129-01-11 +214451696109242839,1977-01-04 +-4961171400048338491,2196-08-10 +4436884039838843341,2031-05-23 +2438535236662373438,1916-01-10 +6049335087268933751,2282-06-09 +8755921538765428593,1827-05-01 +5252407779338300447,2039-03-10 +-2184423060953067642,1853-07-06 +7297177530102477725,1926-04-12 +-2098090254092150988,1817-03-12 +-5754527700632192146,1958-07-15 +-614848861623872247,2112-11-09 +5246983111579595707,1817-07-01 +-2098090254092150988,2219-12-23 +-5706981533666803767,2151-06-09 +7297177530102477725,2125-08-26 +-7707546703881534780,2134-08-20 +214451696109242839,2179-04-18 +3845554233155411208,1805-11-10 +3905351789241845882,2045-12-05 +2438535236662373438,2026-06-23 +-2688622006344936758,1948-10-15 +6049335087268933751,2086-12-17 +-2575185053386712613,1809-07-12 +-327698348664467755,2222-10-15 +-4224290881682877258,1813-05-17 +3873405809071478736,2164-04-23 +-5706981533666803767,1800-09-20 +214451696109242839,1855-05-12 +2438535236662373438,1881-09-16 +5252407779338300447,2042-04-26 +-3655445881497026796,2108-08-16 +3905351789241845882,1866-07-28 +-6784441713807772877,2054-06-17 +5246983111579595707,2260-05-11 +-1339636982994067311,2008-12-03 +3873405809071478736,1918-11-20 +-4224290881682877258,2120-01-16 +3845554233155411208,2264-04-05 diff --git data/files/fullouter_long_small_1a_nonull.txt data/files/fullouter_long_small_1a_nonull.txt new file mode 100644 index 0000000000..bf94d5a866 --- /dev/null +++ data/files/fullouter_long_small_1a_nonull.txt @@ -0,0 +1,51 @@ +5246983111579595707,1817-07-01 +4436884039838843341,2031-05-23 +-4224290881682877258,1813-05-17 +-4961171400048338491,2196-08-10 +-2575185053386712613,2105-01-21 +5252407779338300447,2042-04-26 +-614848861623872247,2101-05-25 +-2098090254092150988,2163-05-26 +2438535236662373438,1881-09-16 +214451696109242839,2179-04-18 +2438535236662373438,2026-06-23 +-2184423060953067642,1853-07-06 +3873405809071478736,2164-04-23 +214451696109242839,1855-05-12 +-6784441713807772877,1845-02-16 +-2688622006344936758,1948-10-15 +7297177530102477725,1921-05-11 +-2575185053386712613,1809-07-12 +3905351789241845882,2045-12-05 +3845554233155411208,1805-11-10 +-3655445881497026796,2108-08-16 +3905351789241845882,1866-07-28 +-1339636982994067311,2008-12-03 +7297177530102477725,2125-08-26 +7297177530102477725,1926-04-12 +-5706981533666803767,1800-09-20 +6049335087268933751,2282-06-09 +3845554233155411208,2264-04-05 +8755921538765428593,1827-05-01 +-1339636982994067311,2000-06-20 +-2098090254092150988,1817-03-12 +3873405809071478736,2034-06-09 +2438535236662373438,1916-01-10 +5246983111579595707,2260-05-11 +-5706981533666803767,2151-06-09 +-614848861623872247,2112-11-09 +-327698348664467755,2222-10-15 +-2184423060953067642,1880-10-06 +434940853096155515,2275-02-08 +-4224290881682877258,2120-01-16 +-5754527700632192146,1958-07-15 +-4224290881682877258,2185-07-08 +-2098090254092150988,2219-12-23 +-7707546703881534780,2134-08-20 +214451696109242839,1977-01-04 +-2688622006344936758,2129-01-11 +7937120928560087303,2083-03-14 +-6784441713807772877,2054-06-17 +3873405809071478736,1918-11-20 +6049335087268933751,2086-12-17 +5252407779338300447,2039-03-10 diff --git data/files/fullouter_long_small_1b.txt data/files/fullouter_long_small_1b.txt new file mode 100644 index 0000000000..7d45fe4120 --- /dev/null +++ data/files/fullouter_long_small_1b.txt @@ -0,0 +1,72 @@ +2748,2298-06-20 21:01:24 +11232,2533-11-26 12:22:18 +\N,2124-05-07 15:01:19.021 +3198,2428-06-13 16:21:33.955 +-7624,2219-12-03 17:07:19 +24870,2752-12-26 
12:32:23.03685163 +14865,2943-03-21 00:42:10.505 +-8624,2644-05-04 04:45:07.839 +-30059,2269-05-04 21:23:44.000339209 +14865,2079-10-06 16:54:35.117 +-8435,2834-12-06 16:38:18.901 +10553,2168-05-05 21:10:59.000152113 +-8624,2282-03-28 07:58:16 +-15361,2219-09-15 20:15:03.000169887 +-14172,1918-09-13 11:44:24.496926711 +26484,1919-03-04 07:32:37.519 +-14172,2355-01-14 23:23:34 +-24775,2920-08-06 15:58:28.261059449 +-23117,2037-01-05 21:52:30.685952759 +17125,2236-07-14 01:54:40.927230276 +21181,2253-03-12 11:55:48.332 +-7373,2662-10-28 12:07:02.000526564 +-8087,2550-06-26 23:57:42.588007617 +29407,2385-12-14 06:03:39.597 +21181,2434-02-20 00:46:29.633 +-14172,2809-06-07 02:10:58 +13598,2421-05-20 14:18:31.000264698 +2748,2759-02-13 18:04:36.000307355 +-22422,1949-03-13 00:07:53.075 +26484,2953-03-10 02:05:26.508953676 +4510,2777-03-24 03:44:28.000169723 +-24775,2035-03-26 08:11:23.375224153 +-30059,2713-10-13 09:28:49 +-20517,2774-06-23 12:04:06.5 +11232,2038-04-06 14:53:59 +32030,2101-09-09 07:35:05.145 +-29600,2333-11-02 15:06:30 +-30306,2619-05-24 10:35:58.000774018 +-7624,2289-08-28 00:14:34 +-4279,2470-08-12 11:21:14.000955747 +-4279,2214-09-10 03:53:06 +-26998,2428-12-26 07:53:45.96925825 +17125,2629-11-15 15:34:52 +-8087,2923-07-02 11:40:26.115 +2632,2561-12-15 15:42:27 +21436,2696-05-08 05:19:24.112 +\N,2971-08-07 12:02:11.000948152 +-7624,2623-03-20 03:18:45.00006465 +-26998,2926-07-18 09:02:46.077 +11232,2507-01-27 22:04:22.49661421 +-30059,2420-12-10 22:12:30 +-15427,2355-01-08 12:34:11.617 +3198,2223-04-14 13:20:49 +-19167,2319-08-26 11:07:11.268 +14865,2220-02-28 03:41:36 +-20517,2233-12-20 04:06:56.666522799 +-15427,2046-06-07 22:58:40.728 +2748,2862-04-20 13:12:39.482805897 +-8435,2642-02-07 11:45:04.353231638 +-19167,2230-12-22 20:25:39.000242111 +-15427,2023-11-09 19:31:21 +13598,2909-06-25 23:22:50 +21436,2526-09-22 23:44:55 +-15361,2434-08-13 20:37:07.000172979 +4510,2293-01-17 13:47:41.00001006 +-8624,2120-02-15 15:36:40.000758423 +-22422,2337-07-19 06:33:02.000353352 +-26998,2268-08-04 12:48:11.848006292 +-22422,2982-12-28 06:30:26.000883228 +\N,2933-06-20 11:48:09.000839488 +3198,2736-12-20 03:59:50.343550301 +-20824,2478-11-05 00:28:05 diff --git data/files/fullouter_long_small_1c.txt data/files/fullouter_long_small_1c.txt new file mode 100644 index 0000000000..ff323d367e --- /dev/null +++ data/files/fullouter_long_small_1c.txt @@ -0,0 +1,81 @@ +-1093006502,-69.55665828 +452719211,83003.43722 +1242586043,71.1485 +-934092157,-7843850349.57130038 +294598722,-3542.6 +284554389,5.727146 +90660785,12590.288613 +-99948814,-38076694.3981 +466567142,-9763217822.129028 +1909136587,-8610.078036935181 +1242586043,-4 +\N,1.089120893565337 +1039864870,987601.57 +-466171792,0 +-1681455031,-6.4543 +1755897735,-39.965207 +1585021913,745222.66808954 +448130683,-4302.485366846491 +193709887,0.8 +-424713789,0.48 +1585021913,607.22747 +-1250662632,5454127198.951479 +294598722,-9377326244.444 +193709887,-19889.83 +1039864870,0.7 +1242586043,-749975924224.63 +-1250662632,-544.554649 +-1740848088,-9.157 +-369457052,7.7 +-369457052,560.11907883090455 +90660785,-4564.517185 +466567142,-58810.60586 +466567142,196.5785295398584 +1738753776,1525.280459649262 +1816559437,-1035.7009 +-1490239076,92253.232096 +1039864870,94.04 +560745412,678.25 +-466171792,4227.5344 +1561921421,53050.55 +-99948814,-96386.438 +1519948464,152 +1719049112,-7888197 +-793950320,-16 +-466171792,69.9 +1738753776,-99817635066320.2416 +1091836730,0.02 +891262439,-0.04 +452719211,3020.2938930744636 +-2048404259,3939387044.1 
+698032489,-330457.4292625839 +-1197550983,-0.5588796922 +-2123273881,-55.89198 +-2048404259,-0.3222960446251 +1585021913,-5762331.06697112 +1785750809,47443.115 +1909136587,181.07681535944 +1801735854,-1760956929364.267 +\N,4.26165227 +1801735854,-438541294.7 +150678276,-8278 +1479580778,92077343080.7 +1091836730,-5017.14 +193709887,-0.5663 +-1681455031,-11105.372477 +-1250662632,93104 +-1197550983,0.1 +\N,682070836.2649603 +-1197550983,71852.8338674412613 +1561921421,-5.405 +-1740848088,0.506394259 +150678276,15989394.8436 +-793950320,-0.1 +-1740848088,901.441 +-477147437,6 +-1264372462,0.883 +-2123273881,3.959 +-1264372462,-6993985240226 +-1264372462,-899 +-243940373,-97176129669.654953 +-243940373,-583.258 diff --git data/files/fullouter_long_small_1d.txt data/files/fullouter_long_small_1d.txt new file mode 100644 index 0000000000..9778d3ff62 --- /dev/null +++ data/files/fullouter_long_small_1d.txt @@ -0,0 +1,39 @@ +533298451 +1164387380 +1614287784 +1635405412 +-1912571616 +-894799664 +-1210744742 +-1014271154 +-747044796 +-1003639073 +436878811 +-1323620496 +-1379355738 +-1712018127 +246169862 +1431997749 +670834064 +1780951928 +-707688773 +1997943409 +1372592319 +-932176731 +162858059 +-683339273 +-497171161 +699863556 +1685473722 +41376947 +-1036083124 +1825107160 +-2038654700 +2119085509 +260588085 +-1792852276 +1831520491 +103640700 +\N +699007128 +1840266070 diff --git data/files/fullouter_multikey_big_1a.txt data/files/fullouter_multikey_big_1a.txt new file mode 100644 index 0000000000..fe38c7b528 --- /dev/null +++ data/files/fullouter_multikey_big_1a.txt @@ -0,0 +1,13 @@ +22767,-1969080993 +-17582,-1730236061 +3556,\N +-17582,1082230084 +-17582,827141667 +1499,371855128 +-17582,9637312 +\N,1082230084 +-6131,-1969080993 +3556,-1969080993 +\N,\N +-18222,-1969080993 +-17582,267529350 diff --git data/files/fullouter_multikey_big_1a_nonull.txt data/files/fullouter_multikey_big_1a_nonull.txt new file mode 100644 index 0000000000..40e84b04be --- /dev/null +++ data/files/fullouter_multikey_big_1a_nonull.txt @@ -0,0 +1,10 @@ +-17582,1082230084 +22767,-1969080993 +-17582,827141667 +-17582,-1730236061 +3556,-1969080993 +-6131,-1969080993 +-18222,-1969080993 +1499,371855128 +-17582,267529350 +-17582,9637312 diff --git data/files/fullouter_multikey_big_1b.txt data/files/fullouter_multikey_big_1b.txt new file mode 100644 index 0000000000..40cfb9a954 --- /dev/null +++ data/files/fullouter_multikey_big_1b.txt @@ -0,0 +1,17 @@ +2061-12-19 22:10:32.000628309,21635,ANCO +\N,21635,ANCO +2686-05-23 07:46:46.565832918,13212,NCYBDW +2082-07-14 04:00:40.695380469,12556,NCYBDW +2188-06-04 15:03:14.963259704,9468,AAA +2608-02-23 23:44:02.546440891,26184,NCYBDW +2093-04-10 23:36:54.846,\N,\N +2898-10-01 22:27:02.000871113,10361,NCYBDW +2306-06-21 11:02:00.143124239,1446,\N +\N,-6909,\N +\N,\N,\N +2306-06-21 11:02:00.143124239,-6909,NCYBDW +2093-04-10 23:36:54.846,1446,GHZVPWFO +\N,\N,CCWYD +2686-05-23 07:46:46.565832918,\N,GHZVPWFO +2093-04-10 23:36:54.846,28996,Q +2299-11-15 16:41:30.401,-31077,NCYBDW diff --git data/files/fullouter_multikey_small_1a.txt data/files/fullouter_multikey_small_1a.txt new file mode 100644 index 0000000000..4e0742c8b8 --- /dev/null +++ data/files/fullouter_multikey_small_1a.txt @@ -0,0 +1,92 @@ +23015,258882280 +23015,-276888585 +21186,-586336015 +-22311,-2055239583 +3412,-1249487623 +\N,1082230084 +20156,-1618478138 +-17788,-738743861 +-24206,-1456409156 +30353,2044473567 +20969,-1995259010 +-23457,-63842445 +3412,-2081156563 +-6131,-1969080993 +23015,-252525791 
+30353,1364268303 +23015,564751472 +15404,1078466156 +4586,-586336015 +-4117,-1386947816 +-26894,-63842445 +-17788,-1361776766 +-7386,-2112062470 +23015,-1893013623 +30353,1241923267 +-24206,641361618 +-28129,-2055239583 +-20125,-1995259010 +16166,931172175 +31443,-1968665833 +-28313,837320573 +11460,1078466156 +15061,-63842445 +13672,-63842445 +14400,-825652334 +-7386,100736776 +26944,-1995259010 +-11868,97203778 +12089,-63842445 +-28137,-63842445 +3412,1253976194 +-980,2009785365 +16696,-63842445 +-11868,930596435 +4902,1078466156 +-17582,267529350 +-12252,964377504 +20156,963883665 +-11868,1658440922 +4779,-1995259010 +-7386,-1635102480 +-28313,51228026 +-11868,1052120431 +-980,-270600267 +-20900,1078466156 +\N,\N +20156,1165375499 +30353,-1507157031 +3412,-1196037018 +22934,-1695419330 +30353,105613996 +-17788,-872691214 +-980,-333603940 +30353,-1011627089 +-11868,-3536499 +-2407,1078466156 +23015,-217613200 +-28313,-706104224 +-980,712692345 +-11868,1456809245 +-17788,528419995 +-11868,-915441041 +-980,628784462 +30353,-1007182618 +23015,-696928205 +-980,356970043 +23015,-893234501 +-980,-465544127 +-5734,1078466156 +-980,-801821285 +26738,-2055239583 +8177,-1995259010 +-11868,1318114822 +3890,1411429004 +-6061,-586336015 +3412,-2132472060 +-15212,-2055239583 +-12252,1956403781 +5957,-1995259010 +-1787,-63842445 +20156,1855042153 +-980,1310479628 diff --git data/files/fullouter_multikey_small_1a_nonull.txt data/files/fullouter_multikey_small_1a_nonull.txt new file mode 100644 index 0000000000..2a8b9a1d90 --- /dev/null +++ data/files/fullouter_multikey_small_1a_nonull.txt @@ -0,0 +1,90 @@ +16696,-63842445 +4586,-586336015 +26738,-2055239583 +-17788,-738743861 +-28313,-706104224 +-23457,-63842445 +-20900,1078466156 +-12252,964377504 +-28313,51228026 +-11868,-3536499 +11460,1078466156 +26944,-1995259010 +20156,1855042153 +-11868,97203778 +15061,-63842445 +-17788,528419995 +-26894,-63842445 +-28313,837320573 +20156,963883665 +-15212,-2055239583 +5957,-1995259010 +30353,-1011627089 +3890,1411429004 +-980,-333603940 +13672,-63842445 +-980,628784462 +23015,-252525791 +-11868,1052120431 +-980,356970043 +23015,-217613200 +-6061,-586336015 +-5734,1078466156 +-11868,1318114822 +23015,258882280 +-2407,1078466156 +12089,-63842445 +3412,-2132472060 +-28129,-2055239583 +-980,-270600267 +16166,931172175 +-7386,100736776 +4902,1078466156 +20969,-1995259010 +22934,-1695419330 +3412,-1249487623 +3412,1253976194 +21186,-586336015 +8177,-1995259010 +-7386,-1635102480 +-11868,1456809245 +-20125,-1995259010 +-980,-801821285 +-980,1310479628 +23015,564751472 +23015,-893234501 +4779,-1995259010 +-980,2009785365 +-24206,641361618 +30353,-1507157031 +14400,-825652334 +3412,-2081156563 +20156,-1618478138 +31443,-1968665833 +-22311,-2055239583 +30353,1241923267 +-11868,930596435 +-17788,-1361776766 +-24206,-1456409156 +-7386,-2112062470 +30353,1364268303 +23015,-1893013623 +-17788,-872691214 +30353,2044473567 +-28137,-63842445 +30353,105613996 +-6131,-1969080993 +-17582,267529350 +23015,-276888585 +-12252,1956403781 +23015,-696928205 +-11868,1658440922 +-1787,-63842445 +-11868,-915441041 +-980,-465544127 +30353,-1007182618 +-980,712692345 +20156,1165375499 +3412,-1196037018 +15404,1078466156 +-4117,-1386947816 diff --git data/files/fullouter_multikey_small_1b.txt data/files/fullouter_multikey_small_1b.txt new file mode 100644 index 0000000000..b56a3f7f46 --- /dev/null +++ data/files/fullouter_multikey_small_1b.txt @@ -0,0 +1,118 @@ +2304-12-15 15:31:16,11101,YJCKKCR,-0.2 +2018-11-25 
22:27:55.84,-12202,VBDBM,7506645.9537 +1957-03-06 09:57:31,-26373,NXLNNSO,2 +2332-06-14 07:02:42.32,-26373,XFFFDTQ,56845106806308.9 +2535-03-01 05:04:49.000525883,23663,ALIQKNXHE,-0.1665691 +2629-04-07 01:54:11,-6776,WGGFVFTW,6.8012851708 +2266-09-26 06:27:29.000284762,20223,EDYJJN,14 +2969-01-23 14:08:04.000667259,-18138,VDPN,8924831210.42768019 +2861-05-27 07:13:01.000848622,-19598,WKPXNLXS,29399 +2301-06-03 17:16:19,15332,ZVEUKC,0.5 +1980-09-13 19:57:15,\N,M,57650.7723 +2304-12-15 15:31:16,1301,T,-0.8 +2461-03-09 09:54:45.000982385,-16454,ZSMB,-991.43605 +2044-05-02 07:00:03.35,-8751,ZSMB,-453797242.029791752 +2409-09-23 10:33:27,2638,XSXR,-9926693851 +1941-10-16 02:19:36.000423663,-24459,AO,-821445414.4579712 +2512-10-06 03:03:03,-3465,VZQ,-49.51219 +2971-02-14 09:13:19,-16605,BVACIRP,-5.751278023 +2075-10-25 20:32:40.000792874,\N,\N,226612651968.36076 +2073-03-21 15:32:57.617920888,26425,MPRACIRYW,5 +2969-01-23 14:08:04.000667259,14500,WXLTRFQP,-23.8198 +2898-12-18 03:37:17,-24459,MHNBXPBM,14.23669356238481 +\N,\N,\N,-2207.3 +2391-01-17 15:28:37.00045143,16160,ZVEUKC,771355639420297.133 +2309-01-15 12:43:49,22821,ZMY,40.9 +2340-12-15 05:15:17.133588982,23663,HHTP,33383.8 +2969-01-23 14:08:04.000667259,-8913,UIMQ,9.178 +2145-10-15 06:58:42.831,2638,\N,-9784.82 +2888-05-08 08:36:55.182302102,5786,ZVEUKC,-56082455.033918 +2467-05-11 06:04:13.426693647,23196,EIBSDASR,-8.5548883801 +2829-06-04 08:01:47.836,22771,ZVEUKC,94317.75318 +2938-12-21 23:35:59.498,29362,ZMY,0.88 +2304-12-15 15:31:16,-13125,JFYW,6.086657 +2808-07-09 02:10:11.928498854,-19598,FHFX,0.3 +2083-06-07 09:35:19.383,-26373,MR,-394.0867 +2686-05-23 07:46:46.565832918,13212,NCYBDW,-917116793.4 +2969-01-23 14:08:04.000667259,-8913,UIMQ,-375994644577.315257 +2338-02-12 09:30:07,20223,CTH,-6154.763054 +2629-04-07 01:54:11,-6776,WGGFVFTW,41.77451507786646 +2242-08-04 07:51:46.905,20223,UCYXACQ,37.7288 +2637-03-12 22:25:46.385,-12923,PPTJPFR,5.4 +2304-12-15 15:31:16,8650,RLNO,0.71351747335 +2688-02-06 20:58:42.000947837,20223,PAIY,67661.735 +\N,\N,\N,-2.4 +2512-10-06 03:03:03,-3465,VZQ,0.4458 +2960-04-12 07:03:42.000366651,20340,CYZYUNSF,-96.3 +2461-03-09 09:54:45.000982385,-16454,ZSMB,-9575827.55396 +2512-10-06 03:03:03,1560,X,-922.6951584107 +2396-04-06 15:39:02.404013577,29661,ZSMB,0.76718326 +2409-09-23 10:33:27,2638,XSXR,0.4 +2969-01-23 14:08:04.000667259,6689,TFGVOGPJF,-0.01 +2333-07-28 09:59:26,23196,RKSK,37872288434740893.5 +2409-09-23 10:33:27,2638,XSXR,-162.95 +2357-05-08 07:09:09.000482799,6226,ZSMB,-472 +2304-12-15 15:31:16,15090,G,-4319470286240016.3 +2304-12-15 15:31:16,1301,T,61.302 +2105-01-04 16:27:45,23100,ZSMB,-83.2328 +2242-08-04 07:51:46.905,20223,UCYXACQ,-0.26149 +2637-03-12 22:25:46.385,-17786,HYEGQ,-84.169614329419 +1931-12-04 11:13:47.269597392,23196,HVJCQMTQL,-9697532.8994 +2897-08-10 15:21:47.09,23663,XYUVBED,6370 +2888-05-08 08:36:55.182302102,5786,ZVEUKC,57.62175257788037 +2145-10-15 06:58:42.831,2638,UANGISEXR,-5996.306 +2462-12-16 23:11:32.633305644,-26373,CB,67.41799 +2396-04-06 15:39:02.404013577,29661,ZSMB,-5151598.347 +2304-12-15 15:31:16,15090,G,975 +2512-10-06 03:03:03,32099,ARNZ,-0.41 +2188-06-04 15:03:14.963259704,9468,AAA,2.75496352 +2512-10-06 03:03:03,1560,X,761196.522 +2304-12-15 15:31:16,1301,T,2720.8 +1919-06-20 00:16:50.611028595,20223,ZKBC,-23 +2897-08-10 15:21:47.09,23663,XYUVBED,51.7323303273 +2086-04-09 00:03:10,20223,THXNJGFFV,-85184687349898.892 +2238-05-17 19:27:25.519,20223,KQCM,-0.01095 +2086-04-09 00:03:10,20223,THXNJGFFV,482.5383411359219 +2480-10-02 
09:31:37.000770961,-26373,NBN,-5875.5197252 +2086-04-09 00:03:10,20223,THXNJGFFV,0.4396861 +2759-11-26 22:19:55.410967136,-27454,ZMY,60.6025797 +2083-06-07 09:35:19.383,-26373,MR,67892053.02376094 +2882-05-20 07:21:25.221299462,23196,U,-9951044 +2971-02-14 09:13:19,-16605,BVACIRP,-27394351.3 +2512-10-06 03:03:03,24313,QBHUG,-8423.151573236 +2882-05-20 07:21:25.221299462,23196,U,-4244.926206619 +1905-04-20 13:42:25.000469776,2638,KAUUFF,7 +2410-05-03 13:44:56,2638,PHOR,-769088.176482 +2668-06-25 07:12:37.000970744,2638,TJE,-2.7796827 +2969-01-23 14:08:04.000667259,-32485,AGEPWWLJF,-48431309405.652522 +2410-05-03 13:44:56,2638,PHOR,93262.914526611 +2512-10-06 03:03:03,13195,CRJ,14 +2018-11-25 22:27:55.84,-12202,VBDBM,98790.713907420831 +2304-12-15 15:31:16,8650,RLNO,-0.4355 +2071-07-21 20:02:32.000250697,2638,NRUV,-66198.351092 +2525-05-12 15:59:35,-24459,SAVRGA,53106747151.8633 +2637-03-12 22:25:46.385,21841,CXTI,749563668434009.65 +2018-11-25 22:27:55.84,-22419,LOTLS,342.3726040228584 +2637-03-12 22:25:46.385,21841,CXTI,7362887891522.3782 +2038-10-12 09:15:33.000539653,-19598,YKNIAJW,-642807895924.66 +2957-05-07 10:41:46,20223,OWQT,-586953.153681 +2304-12-15 15:31:16,11101,YJCKKCR,1279917802.42 +2355-09-23 19:52:34.638084141,-19598,H,92.15 +2960-04-12 07:03:42.000366651,20340,CYZYUNSF,2.1577659 +2355-09-23 19:52:34.638084141,-19598,H,74179461.880493 +2969-01-23 14:08:04.000667259,-8913,UIMQ,-81 +\N,-12914,ZVEUKC,221 +2743-12-27 05:16:19.000573579,-12914,ZVEUKC,-811984611.5178497 +1957-02-01 14:00:29.000548421,-16085,ZVEUKC,-2312.8149 +2201-07-05 17:22:06.084206844,-24459,UBGT,1.5069483282 +2461-03-09 09:54:45.000982385,-16454,ZSMB,8694.89 +2169-04-02 06:30:32,23855,PDVQATOS,-1515597428 +2304-12-15 15:31:16,30285,GSJPSIYOU,0.2 +2913-07-17 15:06:58.041,-10206,\N,-0.2 +2169-04-02 06:30:32,23855,PDVQATOS,-4016.9608 +2759-11-26 22:19:55.410967136,-27454,ZMY,368 +2073-03-21 15:32:57.617920888,26425,MPRACIRYW,726945733.4193 +2304-12-15 15:31:16,11101,YJCKKCR,-0.5 +2462-12-16 23:11:32.633305644,-26373,CB,-582687 +2357-05-08 07:09:09.000482799,6226,ZSMB,-32.46 +2304-12-15 15:31:16,12587,OPW,-4.59489504 diff --git data/files/fullouter_string_big_1a.txt data/files/fullouter_string_big_1a.txt new file mode 100644 index 0000000000..1cbcd05d26 --- /dev/null +++ data/files/fullouter_string_big_1a.txt @@ -0,0 +1,13 @@ +FTWURVH +QNCYBDW +UA +WXHJ +\N +WXHJ +PXLD +WXHJ +PXLD +WXHJ +WXHJ +MXGDMBD +PXLD diff --git data/files/fullouter_string_big_1a_nonull.txt data/files/fullouter_string_big_1a_nonull.txt new file mode 100644 index 0000000000..a6566f2916 --- /dev/null +++ data/files/fullouter_string_big_1a_nonull.txt @@ -0,0 +1,12 @@ +WXHJ +WXHJ +FTWURVH +MXGDMBD +UA +WXHJ +QNCYBDW +PXLD +PXLD +WXHJ +PXLD +WXHJ diff --git data/files/fullouter_string_big_1a_old.txt data/files/fullouter_string_big_1a_old.txt new file mode 100644 index 0000000000..1fa51ad799 --- /dev/null +++ data/files/fullouter_string_big_1a_old.txt @@ -0,0 +1,13 @@ +WXHJ +WXHJ +WXHJ +WXHJ +WXHJ +QNCYBDW +PXLD +PXLD +PXLD +UA +\N +FTWURVH +MXGDMBD diff --git data/files/fullouter_string_small_1a.txt data/files/fullouter_string_small_1a.txt new file mode 100644 index 0000000000..f223da07e2 --- /dev/null +++ data/files/fullouter_string_small_1a.txt @@ -0,0 +1,38 @@ +BDBMW,2278-04-27,2101-02-21 08:53:34.692 +FROPIK,2023-02-28,2467-05-11 06:04:13.426693647 +GOYJHW,1976-03-06,2805-07-10 10:51:57.00083302 +MXGDMBD,1880-11-01,2765-10-06 13:28:17.000688592 +CQMTQLI,2031-09-13,1927-02-13 08:39:25.000919094 +,1985-01-22,2111-01-10 15:44:28 
+IOQIDQBHU,2198-02-08,2073-03-21 15:32:57.617920888 +GSJPSIYOU,1948-07-17,2006-09-24 16:01:24.000239251 +\N,1865-11-08,2893-04-07 07:36:12 +BEP,2206-08-10,2331-10-09 10:59:51 +NADANUQMW,2037-10-19,2320-04-26 18:50:25.000426922 +\N,2250-04-22,2548-03-21 08:23:13.133573801 +ATZJTPECF,1829-10-16,2357-05-08 07:09:09.000482799 +IWEZJHKE,\N,\N +AARNZRVZQ,2002-10-23,2525-05-12 15:59:35 +BEP,2141-02-19,2521-06-09 01:20:07.121 +AARNZRVZQ,2000-11-13,2309-06-05 19:54:13 +LOTLS,1957-11-09,2092-06-07 06:42:30.000538454 +FROPIK,2124-10-01,2974-07-06 12:05:08.000146048 +KL,1980-09-22,2073-08-25 11:51:10.318 +\N,1915-02-22,2554-10-27 09:34:30 +WNGFTTY,1843-06-10,2411-01-28 20:03:59 +VNRXWQ,1883-02-06,2287-07-17 16:46:58.287 +QTSRKSKB,2144-01-13,2627-12-20 03:38:53.000389266 +GOYJHW,1959-04-27,\N +LOTLS,2099-08-04,2181-01-25 01:04:25.000030055 +CQMTQLI,2090-11-13,2693-03-17 16:19:55.82 +VNRXWQ,2276-11-16,2072-08-16 17:45:47.48349887 +LOTLS,2126-09-16,1977-12-15 15:28:56 +FTWURVH,1976-03-10,2683-11-22 13:07:04.66673556 +,2021-02-21,2802-04-21 18:48:18.5933838 +ZNOUDCR,\N,1988-04-23 08:40:21 +FROPIK,2214-02-09,1949-08-18 17:14:38.000703738 +SDA,2196-04-12,2462-10-26 19:28:12.733 +WNGFTTY,2251-08-16,2649-12-21 18:30:42.498 +GOYJHW,1993-04-07,1950-05-04 09:28:22.000114784 +FYW,1807-03-20,2305-08-17 01:32:44 +ATZJTPECF,2217-10-22,2808-10-20 16:01:24.558 diff --git data/files/fullouter_string_small_1a_nonull.txt data/files/fullouter_string_small_1a_nonull.txt new file mode 100644 index 0000000000..6b97ef4a1d --- /dev/null +++ data/files/fullouter_string_small_1a_nonull.txt @@ -0,0 +1,35 @@ +LOTLS,2126-09-16,1977-12-15 15:28:56 +MXGDMBD,1880-11-01,2765-10-06 13:28:17.000688592 +WNGFTTY,2251-08-16,2649-12-21 18:30:42.498 +QTSRKSKB,2144-01-13,2627-12-20 03:38:53.000389266 +AARNZRVZQ,2002-10-23,2525-05-12 15:59:35 +BEP,2141-02-19,2521-06-09 01:20:07.121 +ZNOUDCR,\N,1988-04-23 08:40:21 +FROPIK,2023-02-28,2467-05-11 06:04:13.426693647 +GOYJHW,1993-04-07,1950-05-04 09:28:22.000114784 +CQMTQLI,2090-11-13,2693-03-17 16:19:55.82 +BDBMW,2278-04-27,2101-02-21 08:53:34.692 +AARNZRVZQ,2000-11-13,2309-06-05 19:54:13 +FYW,1807-03-20,2305-08-17 01:32:44 +,2021-02-21,2802-04-21 18:48:18.5933838 +VNRXWQ,1883-02-06,2287-07-17 16:46:58.287 +FROPIK,2124-10-01,2974-07-06 12:05:08.000146048 +LOTLS,2099-08-04,2181-01-25 01:04:25.000030055 +BEP,2206-08-10,2331-10-09 10:59:51 +WNGFTTY,1843-06-10,2411-01-28 20:03:59 +LOTLS,1957-11-09,2092-06-07 06:42:30.000538454 +CQMTQLI,2031-09-13,1927-02-13 08:39:25.000919094 +GOYJHW,1976-03-06,2805-07-10 10:51:57.00083302 +,1985-01-22,2111-01-10 15:44:28 +SDA,2196-04-12,2462-10-26 19:28:12.733 +ATZJTPECF,1829-10-16,2357-05-08 07:09:09.000482799 +GOYJHW,1959-04-27,\N +FTWURVH,1976-03-10,2683-11-22 13:07:04.66673556 +KL,1980-09-22,2073-08-25 11:51:10.318 +ATZJTPECF,2217-10-22,2808-10-20 16:01:24.558 +NADANUQMW,2037-10-19,2320-04-26 18:50:25.000426922 +FROPIK,2214-02-09,1949-08-18 17:14:38.000703738 +IWEZJHKE,\N,\N +GSJPSIYOU,1948-07-17,2006-09-24 16:01:24.000239251 +IOQIDQBHU,2198-02-08,2073-03-21 15:32:57.617920888 +VNRXWQ,2276-11-16,2072-08-16 17:45:47.48349887 diff --git data/files/fullouter_string_small_1a_old.txt data/files/fullouter_string_small_1a_old.txt new file mode 100644 index 0000000000..505c4032e5 --- /dev/null +++ data/files/fullouter_string_small_1a_old.txt @@ -0,0 +1,38 @@ +,2021-02-21,2802-04-21 18:48:18.5933838 +,1985-01-22,2111-01-10 15:44:28 +VNRXWQ,1883-02-06,2287-07-17 16:46:58.287 +VNRXWQ,2276-11-16,2072-08-16 17:45:47.48349887 +KL,1980-09-22,2073-08-25 11:51:10.318 
+FYW,1807-03-20,2305-08-17 01:32:44 +WNGFTTY,2251-08-16,2649-12-21 18:30:42.498 +WNGFTTY,1843-06-10,2411-01-28 20:03:59 +FTWURVH,1976-03-10,2683-11-22 13:07:04.66673556 +CQMTQLI,2031-09-13,1927-02-13 08:39:25.000919094 +CQMTQLI,2090-11-13,2693-03-17 16:19:55.82 +BEP,2141-02-19,2521-06-09 01:20:07.121 +BEP,2206-08-10,2331-10-09 10:59:51 +FROPIK,2023-02-28,2467-05-11 06:04:13.426693647 +FROPIK,2214-02-09,1949-08-18 17:14:38.000703738 +FROPIK,2124-10-01,2974-07-06 12:05:08.000146048 +SDA,2196-04-12,2462-10-26 19:28:12.733 +ATZJTPECF,2217-10-22,2808-10-20 16:01:24.558 +ATZJTPECF,1829-10-16,2357-05-08 07:09:09.000482799 +MXGDMBD,1880-11-01,2765-10-06 13:28:17.000688592 +IWEZJHKE,\N,\N +NADANUQMW,2037-10-19,2320-04-26 18:50:25.000426922 +GOYJHW,1993-04-07,1950-05-04 09:28:22.000114784 +GOYJHW,1976-03-06,2805-07-10 10:51:57.00083302 +GOYJHW,1959-04-27,\N +QTSRKSKB,2144-01-13,2627-12-20 03:38:53.000389266 +IOQIDQBHU,2198-02-08,2073-03-21 15:32:57.617920888 +ZNOUDCR,\N,1988-04-23 08:40:21 +AARNZRVZQ,2000-11-13,2309-06-05 19:54:13 +AARNZRVZQ,2002-10-23,2525-05-12 15:59:35 +BDBMW,2278-04-27,2101-02-21 08:53:34.692 +\N,1865-11-08,2893-04-07 07:36:12 +\N,2250-04-22,2548-03-21 08:23:13.133573801 +\N,1915-02-22,2554-10-27 09:34:30 +LOTLS,1957-11-09,2092-06-07 06:42:30.000538454 +LOTLS,2126-09-16,1977-12-15 15:28:56 +LOTLS,2099-08-04,2181-01-25 01:04:25.000030055 +GSJPSIYOU,1948-07-17,2006-09-24 16:01:24.000239251 diff --git hcatalog/hcatalog-pig-adapter/pom.xml hcatalog/hcatalog-pig-adapter/pom.xml index a1c8ddf057..c026835f52 100644 --- hcatalog/hcatalog-pig-adapter/pom.xml +++ hcatalog/hcatalog-pig-adapter/pom.xml @@ -136,6 +136,13 @@ tests test + + org.apache.hive + hive-standalone-metastore-server + 4.0.0-SNAPSHOT + tests + test + org.apache.hadoop hadoop-mapreduce-client-common diff --git hcatalog/pom.xml hcatalog/pom.xml index 4894e9a16f..91d7dafa9c 100644 --- hcatalog/pom.xml +++ hcatalog/pom.xml @@ -65,6 +65,13 @@ ${hadoop.version} test + + org.apache.hive + hive-standalone-metastore-server + 4.0.0-SNAPSHOT + tests + test + org.apache.pig pig diff --git itests/hive-blobstore/src/test/results/clientpositive/write_final_output_blobstore.q.out itests/hive-blobstore/src/test/results/clientpositive/write_final_output_blobstore.q.out index b6fff6048f..eb08d5753c 100644 --- itests/hive-blobstore/src/test/results/clientpositive/write_final_output_blobstore.q.out +++ itests/hive-blobstore/src/test/results/clientpositive/write_final_output_blobstore.q.out @@ -150,7 +150,7 @@ STAGE PLANS: GatherStats: false Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE tag: -1 @@ -425,7 +425,7 @@ STAGE PLANS: GatherStats: false Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE tag: -1 diff --git itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/AbstractMapJoin.java itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/AbstractMapJoin.java index af446dbcbc..48f85f4610 100644 --- itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/AbstractMapJoin.java +++ itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/AbstractMapJoin.java @@ -13,21 +13,25 @@ */ package org.apache.hive.benchmark.vectorization.mapjoin; +import java.util.ArrayList; import 
java.util.concurrent.TimeUnit; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.tez.ObjectCache; +import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer; import org.apache.hadoop.hive.ql.exec.util.collectoroperator.CountCollectorTestOperator; import org.apache.hadoop.hive.ql.exec.util.collectoroperator.CountVectorCollectorTestOperator; +import org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestConfig; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestConfig.MapJoinTestImplementation; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestData; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestDescription; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestConfig.CreateMapJoinResult; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestDescription.MapJoinPlanVariation; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestDescription.SmallTableGenerationParameters; -import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerateUtil; import org.apache.hadoop.hive.ql.plan.MapJoinDesc; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.VectorMapJoinVariation; @@ -74,30 +78,31 @@ public void bench() throws Exception { } protected void setupMapJoin(HiveConf hiveConf, long seed, int rowCount, - VectorMapJoinVariation vectorMapJoinVariation, MapJoinTestImplementation mapJoinImplementation, - String[] bigTableColumnNames, TypeInfo[] bigTableTypeInfos, int[] bigTableKeyColumnNums, - String[] smallTableValueColumnNames, TypeInfo[] smallTableValueTypeInfos, - int[] bigTableRetainColumnNums, - int[] smallTableRetainKeyColumnNums, int[] smallTableRetainValueColumnNums, - SmallTableGenerationParameters smallTableGenerationParameters) throws Exception { + VectorMapJoinVariation vectorMapJoinVariation, MapJoinTestImplementation mapJoinImplementation, + String[] bigTableColumnNames, TypeInfo[] bigTableTypeInfos, + int[] bigTableKeyColumnNums, + String[] smallTableValueColumnNames, TypeInfo[] smallTableValueTypeInfos, + int[] bigTableRetainColumnNums, + int[] smallTableRetainKeyColumnNums, int[] smallTableRetainValueColumnNums, + SmallTableGenerationParameters smallTableGenerationParameters) throws Exception { this.vectorMapJoinVariation = vectorMapJoinVariation; this.mapJoinImplementation = mapJoinImplementation; testDesc = new MapJoinTestDescription( hiveConf, vectorMapJoinVariation, - bigTableColumnNames, bigTableTypeInfos, + bigTableTypeInfos, bigTableKeyColumnNums, - smallTableValueColumnNames, smallTableValueTypeInfos, - bigTableRetainColumnNums, - smallTableRetainKeyColumnNums, smallTableRetainValueColumnNums, - smallTableGenerationParameters); + smallTableValueTypeInfos, + smallTableRetainKeyColumnNums, + smallTableGenerationParameters, + MapJoinPlanVariation.DYNAMIC_PARTITION_HASH_JOIN); // Prepare data. Good for ANY implementation variation. 
- testData = new MapJoinTestData(rowCount, testDesc, seed, seed * 10); + testData = new MapJoinTestData(rowCount, testDesc, seed); ObjectRegistryImpl objectRegistry = new ObjectRegistryImpl(); ObjectCache.setupObjectRegistry(objectRegistry); - + operator = setupBenchmarkImplementation( mapJoinImplementation, testDesc, testData); @@ -108,15 +113,21 @@ protected void setupMapJoin(HiveConf hiveConf, long seed, int rowCount, */ if (!isVectorOutput) { - bigTableRows = VectorBatchGenerateUtil.generateRowObjectArray( - testDesc.bigTableKeyTypeInfos, testData.getBigTableBatchStream(), - testData.getBigTableBatch(), testDesc.outputObjectInspectors); + bigTableRows = testData.getBigTableBatchSource().getRandomRows(); } else { - bigTableBatches = VectorBatchGenerateUtil.generateBatchArray( - testData.getBigTableBatchStream(), testData.getBigTableBatch()); - + ArrayList bigTableBatchList = new ArrayList(); + VectorRandomBatchSource batchSource = testData.getBigTableBatchSource(); + batchSource.resetBatchIteration(); + while (true) { + VectorizedRowBatch batch = testData.createBigTableBatch(testDesc); + if (!batchSource.fillNextBatch(batch)) { + break; + } + bigTableBatchList.add(batch); + } + bigTableBatches = bigTableBatchList.toArray(new VectorizedRowBatch[0]); } } @@ -131,7 +142,6 @@ protected static MapJoinOperator setupBenchmarkImplementation( MapJoinTestData testData) throws Exception { - // UNDONE: Parameterize for implementation variation? MapJoinDesc mapJoinDesc = MapJoinTestConfig.createMapJoinDesc(testDesc); final boolean isVectorOutput = isVectorOutput(mapJoinImplementation); @@ -141,9 +151,19 @@ protected static MapJoinOperator setupBenchmarkImplementation( (!isVectorOutput ? new CountCollectorTestOperator() : new CountVectorCollectorTestOperator()); - MapJoinOperator operator = + CreateMapJoinResult createMapJoinResult = MapJoinTestConfig.createMapJoinImplementation( - mapJoinImplementation, testDesc, testCollectorOperator, testData, mapJoinDesc); + mapJoinImplementation, testDesc, testData, mapJoinDesc, + /* shareMapJoinTableContainer */ null); + MapJoinOperator operator = createMapJoinResult.mapJoinOperator; + MapJoinTableContainer mapJoinTableContainer = createMapJoinResult.mapJoinTableContainer; + + // Invoke initializeOp methods. + operator.initialize(testDesc.hiveConf, testDesc.inputObjectInspectors); + + // Fixup the mapJoinTables. 
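+ // The call below assumes the benchmark's two-way join shape with the Big Table at position 0, so the lone Small Table hash table is installed at position 1; an N-way variation would need one such call per small table position.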
+ operator.setTestMapJoinTableContainer(1, mapJoinTableContainer, null); + return operator; } diff --git itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinMultiKeyBenchBase.java itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinMultiKeyBenchBase.java index c9da92a754..aa882973ec 100644 --- itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinMultiKeyBenchBase.java +++ itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinMultiKeyBenchBase.java @@ -59,7 +59,8 @@ public void doSetup(VectorMapJoinVariation vectorMapJoinVariation, setupMapJoin(hiveConf, seed, rowCount, vectorMapJoinVariation, mapJoinImplementation, - bigTableColumnNames, bigTableTypeInfos, bigTableKeyColumnNums, + bigTableColumnNames, bigTableTypeInfos, + bigTableKeyColumnNums, smallTableValueColumnNames, smallTableValueTypeInfos, bigTableRetainColumnNums, smallTableRetainKeyColumnNums, smallTableRetainValueColumnNums, diff --git itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinOneLongKeyBenchBase.java itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinOneLongKeyBenchBase.java index a6b47192a8..60b28907a5 100644 --- itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinOneLongKeyBenchBase.java +++ itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinOneLongKeyBenchBase.java @@ -57,7 +57,8 @@ public void doSetup(VectorMapJoinVariation vectorMapJoinVariation, setupMapJoin(hiveConf, seed, rowCount, vectorMapJoinVariation, mapJoinImplementation, - bigTableColumnNames, bigTableTypeInfos, bigTableKeyColumnNums, + bigTableColumnNames, bigTableTypeInfos, + bigTableKeyColumnNums, smallTableValueColumnNames, smallTableValueTypeInfos, bigTableRetainColumnNums, smallTableRetainKeyColumnNums, smallTableRetainValueColumnNums, diff --git itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinOneStringKeyBenchBase.java itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinOneStringKeyBenchBase.java index 1b310385d2..937ede1882 100644 --- itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinOneStringKeyBenchBase.java +++ itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/mapjoin/MapJoinOneStringKeyBenchBase.java @@ -57,7 +57,8 @@ public void doSetup(VectorMapJoinVariation vectorMapJoinVariation, setupMapJoin(hiveConf, seed, rowCount, vectorMapJoinVariation, mapJoinImplementation, - bigTableColumnNames, bigTableTypeInfos, bigTableKeyColumnNums, + bigTableColumnNames, bigTableTypeInfos, + bigTableKeyColumnNums, smallTableValueColumnNames, smallTableValueTypeInfos, bigTableRetainColumnNums, smallTableRetainKeyColumnNums, smallTableRetainValueColumnNums, diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java index 6c45641a0d..35ad982f20 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java @@ -1400,4 +1400,25 @@ public void testDumpExternalTableSetTrue() throws Throwable { .run("select id from t4") .verifyResult(null); // Returns null 
as create table event doesn't list files } + + @Test + public void testDumpExternalTableWithAddPartitionEvent() throws Throwable { + WarehouseInstance.Tuple tuple = primary.dump("repl dump " + primaryDbName); + + replica.load(replicatedDbName, tuple.dumpLocation); + + tuple = primary.run("use " + primaryDbName) + .run("create external table t1 (place string) partitioned by (country string)") + .run("alter table t1 add partition(country='india')") + .run("alter table t1 add partition(country='us')") + .dump("repl dump " + primaryDbName + " from " + tuple.lastReplicationId + + " with ('hive.repl.include.external.tables'='true')"); + + replica.load(replicatedDbName, tuple.dumpLocation) + .run("use " + replicatedDbName) + .run("show tables like 't1'") + .verifyResult("t1") + .run("show partitions t1") + .verifyResults(new String[] { "country=india", "country=us" }); + } } diff --git itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMiniDruidKafkaCliDriver.java itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMiniDruidKafkaCliDriver.java deleted file mode 100644 index 4768975225..0000000000 --- itests/qtest/src/test/java/org/apache/hadoop/hive/cli/TestMiniDruidKafkaCliDriver.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.hive.cli; - -import org.apache.hadoop.hive.cli.control.CliAdapter; -import org.apache.hadoop.hive.cli.control.CliConfigs; - -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TestRule; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; - -import java.io.File; -import java.util.List; - -@RunWith(Parameterized.class) -public class TestMiniDruidKafkaCliDriver { - - static CliAdapter adapter = new CliConfigs.MiniDruidKafkaCliConfig().getCliAdapter(); - - @Parameters(name = "{0}") - public static List getParameters() throws Exception { - return adapter.getParameters(); - } - - @ClassRule - public static TestRule cliClassRule = adapter.buildClassRule(); - - @Rule - public TestRule cliTestRule = adapter.buildTestRule(); - - private String name; - private File qfile; - - public TestMiniDruidKafkaCliDriver(String name, File qfile) { - this.name = name; - this.qfile = qfile; - } - - @Test - public void testCliDriver() throws Exception { - adapter.runTest(name, qfile); - } - -} diff --git itests/src/test/resources/testconfiguration.properties itests/src/test/resources/testconfiguration.properties index 3a5aec7d6b..64e8183d07 100644 --- itests/src/test/resources/testconfiguration.properties +++ itests/src/test/resources/testconfiguration.properties @@ -512,6 +512,7 @@ minillaplocal.query.files=\ explainanalyze_2.q,\ explainuser_1.q,\ explainuser_4.q,\ + fullouter_mapjoin_1_optimized.q,\ groupby2.q,\ groupby_groupingset_bug.q,\ hybridgrace_hashjoin_1.q,\ @@ -688,6 +689,7 @@ minillaplocal.query.files=\ schema_evol_text_vecrow_part_all_primitive.q,\ schema_evol_text_vecrow_table_llap_io.q,\ schema_evol_text_vecrow_table.q,\ + schema_evol_undecorated.q,\ selectDistinctStar.q,\ semijoin.q,\ semijoin6.q,\ @@ -759,6 +761,10 @@ minillaplocal.query.files=\ vector_create_struct_table.q,\ vector_decimal_2.q,\ vector_decimal_udf.q,\ + vector_full_outer_join.q,\ + vector_fullouter_mapjoin_1_fast.q,\ + vector_fullouter_mapjoin_1_optimized.q,\ + vector_fullouter_mapjoin_1_optimized_passthru.q,\ vector_groupby_cube1.q,\ vector_groupby_grouping_id1.q,\ vector_groupby_grouping_id2.q,\ @@ -1724,13 +1730,12 @@ druid.query.files=druidmini_test1.q,\ druidmini_extractTime.q,\ druidmini_test_alter.q,\ druidmini_floorTime.q, \ - druidmini_masking.q + druidmini_masking.q, \ + druidkafkamini_basic.q, \ + kafka_storage_handler.q druid.llap.local.query.files=druidmini_noop.q -druid.kafka.query.files=druidkafkamini_basic.q \ - kafka_storage_handler.q - # tests to be run by TestErasureCodingHDFSCliDriver and TestCliDriver erasurecoding.shared.query.files=erasure_commands.q diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java index a3dcf9865d..0e4fb97fb9 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliConfigs.java @@ -56,7 +56,6 @@ public CliConfig() { excludesFrom(testConfigProps, "disabled.query.files"); excludesFrom(testConfigProps, "localSpark.only.query.files"); excludesFrom(testConfigProps, "druid.query.files"); - excludesFrom(testConfigProps, "druid.kafka.query.files"); excludesFrom(testConfigProps, "erasurecoding.only.query.files"); excludeQuery("fouter_join_ppr.q"); // Disabled in HIVE-19509 @@ -183,29 +182,6 @@ public MiniDruidCliConfig() { 
setResultsDir("ql/src/test/results/clientpositive/druid"); setLogDir("itests/qtest/target/tmp/log"); - setInitScript("q_test_druid_init.sql"); - setCleanupScript("q_test_cleanup_druid.sql"); - setHiveConfDir("data/conf/llap"); - setClusterType(MiniClusterType.druid); - setMetastoreType(MetastoreType.sql); - setFsType(QTestUtil.FsType.hdfs); - } catch (Exception e) { - throw new RuntimeException("can't construct cliconfig", e); - } - } - } - - public static class MiniDruidKafkaCliConfig extends AbstractCliConfig { - public MiniDruidKafkaCliConfig() { - super(CoreCliDriver.class); - try { - setQueryDir("ql/src/test/queries/clientpositive"); - - includesFrom(testConfigProps, "druid.kafka.query.files"); - - setResultsDir("ql/src/test/results/clientpositive/druid"); - setLogDir("itests/qtest/target/tmp/log"); - setInitScript("q_test_druid_init.sql"); setCleanupScript("q_test_cleanup_druid.sql"); setHiveConfDir("data/conf/llap"); diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java index bfa3d5d7d2..0e8b82930e 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java +++ itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java @@ -434,7 +434,6 @@ private void createRemoteDirs() { llap(CoreClusterType.TEZ, FsType.hdfs), llap_local(CoreClusterType.TEZ, FsType.local), none(CoreClusterType.MR, FsType.local), - druid(CoreClusterType.TEZ, FsType.hdfs), druidLocal(CoreClusterType.TEZ, FsType.local), druidKafka(CoreClusterType.TEZ, FsType.hdfs), kafka(CoreClusterType.TEZ, FsType.hdfs); @@ -472,8 +471,6 @@ public static MiniClusterType valueForString(String type) { return llap; } else if (type.equals("llap_local")) { return llap_local; - } else if (type.equals("druid")) { - return druid; } else if (type.equals("druidLocal")) { return druidLocal; } else if (type.equals("druid-kafka")) { @@ -636,8 +633,7 @@ private void setupMiniCluster(HadoopShims shims, String confDir) throws String uriString = fs.getUri().toString(); - if (clusterType == MiniClusterType.druid - || clusterType == MiniClusterType.druidKafka + if (clusterType == MiniClusterType.druidKafka || clusterType == MiniClusterType.druidLocal) { final String tempDir = System.getProperty("test.tmp.dir"); druidCluster = new MiniDruidCluster("mini-druid", @@ -683,7 +679,7 @@ private void setupMiniCluster(HadoopShims shims, String confDir) throws MiniClusterType.llap, MiniClusterType.llap_local, MiniClusterType.druidLocal, - MiniClusterType.druid + MiniClusterType.druidKafka ).contains(clusterType)) { llapCluster = LlapItUtils.startAndGetMiniLlapCluster(conf, setup.zooKeeperCluster, confDir); } else { @@ -699,7 +695,7 @@ private void setupMiniCluster(HadoopShims shims, String confDir) throws conf, numTrackers, uriString, - EnumSet.of(MiniClusterType.llap, MiniClusterType.llap_local, MiniClusterType.druid).contains(clusterType) + EnumSet.of(MiniClusterType.llap, MiniClusterType.llap_local, MiniClusterType.druidKafka).contains(clusterType) ); } } else if (clusterType == MiniClusterType.miniSparkOnYarn) { diff --git jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java index 458158e91b..70cc34d91e 100644 --- jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java +++ jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java @@ -1492,6 +1492,9 @@ public void setSchema(String schema) throws SQLException { if (schema == null || schema.isEmpty()) { throw new SQLException("Schema name is 
null or empty"); } + if (schema.contains(";")) { + throw new SQLException("invalid schema name"); + } Statement stmt = createStatement(); stmt.execute("use " + schema); stmt.close(); diff --git metastore/pom.xml metastore/pom.xml index 7f751a4935..a75ab97ae7 100644 --- metastore/pom.xml +++ metastore/pom.xml @@ -48,11 +48,6 @@ org.apache.hive hive-standalone-metastore-common ${project.version} - - - org.apache.hive - hive-standalone-metastore-server - ${project.version} javolution diff --git ql/pom.xml ql/pom.xml index a55cbe380d..d73deba440 100644 --- ql/pom.xml +++ ql/pom.xml @@ -448,6 +448,11 @@ opencsv ${opencsv.version} + + org.apache.hive + hive-standalone-metastore-server + ${project.version} + org.apache.hive diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java index 02a67cbd39..3762ee5ab0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java @@ -790,7 +790,16 @@ private boolean hasRightPairForLeft(int left, int right) { } private boolean hasAnyFiltered(int alias, List row) { - return row == dummyObj[alias] || hasFilter(alias) && JoinUtil.hasAnyFiltered(getFilterTag(row)); + if (row == dummyObj[alias]) { + return true; + } + if (hasFilter(alias) && row != null) { + ShortWritable shortWritable = (ShortWritable) row.get(row.size() - 1); + if (shortWritable != null) { + return JoinUtil.hasAnyFiltered(shortWritable.get()); + } + } + return false; } protected final boolean hasFilter(int alias) { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index 79cb54e552..8aa971a639 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -5155,7 +5155,7 @@ private int createView(Hive db, CreateViewDesc crtView) throws HiveException { } } - if (!crtView.isReplace()) { + if (!crtView.isReplace() && !crtView.getIfNotExists()) { // View already exists, thus we should be replacing throw new HiveException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(crtView.getViewName())); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java index 65ec4b27bd..2f20dd4d36 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java @@ -732,6 +732,73 @@ private JSONObject outputPlan(Object work, return outputPlan(work, out, extended, jsonOutput, indent, ""); } + private boolean isInvokeVectorization(Vectorization vectorization) { + + boolean invokeFlag = true; // Assume. + + // The EXPLAIN VECTORIZATION option was specified. + final boolean desireOnly = this.work.isVectorizationOnly(); + final VectorizationDetailLevel desiredVecDetailLevel = + this.work.isVectorizationDetailLevel(); + + switch (vectorization) { + case NON_VECTORIZED: + // Display all non-vectorized leaf objects unless ONLY. + if (desireOnly) { + invokeFlag = false; + } + break; + case SUMMARY: + case OPERATOR: + case EXPRESSION: + case DETAIL: + if (vectorization.rank < desiredVecDetailLevel.rank) { + // This detail not desired. + invokeFlag = false; + } + break; + case SUMMARY_PATH: + case OPERATOR_PATH: + if (desireOnly) { + if (vectorization.rank < desiredVecDetailLevel.rank) { + // Suppress headers and all objects below. 
+ invokeFlag = false; + } + } + break; + default: + throw new RuntimeException("Unknown EXPLAIN vectorization " + vectorization); + } + + return invokeFlag; + } + + private boolean isInvokeNonVectorization(Vectorization vectorization) { + + boolean invokeFlag = true; // Assume. + + // Do not display vectorization objects. + switch (vectorization) { + case SUMMARY: + case OPERATOR: + case EXPRESSION: + case DETAIL: + invokeFlag = false; + break; + case NON_VECTORIZED: + // No action. + break; + case SUMMARY_PATH: + case OPERATOR_PATH: + // Always include headers since they contain non-vectorized objects, too. + break; + default: + throw new RuntimeException("Unknown EXPLAIN vectorization " + vectorization); + } + + return invokeFlag; + } + @VisibleForTesting JSONObject outputPlan(Object work, PrintStream out, boolean extended, boolean jsonOutput, int indent, String appendToHeader) throws Exception { @@ -756,65 +823,17 @@ public JSONObject outputPlan(Object work, PrintStream out, if (extended) { invokeFlag = Level.EXTENDED.in(xpl_note.explainLevels()); } else { - invokeFlag = Level.DEFAULT.in(xpl_note.explainLevels()); + invokeFlag = + Level.DEFAULT.in(xpl_note.explainLevels()) || + (this.work != null && this.work.isDebug() && Level.DEBUG.in(xpl_note.explainLevels())); } } if (invokeFlag) { Vectorization vectorization = xpl_note.vectorization(); if (this.work != null && this.work.isVectorization()) { - - // The EXPLAIN VECTORIZATION option was specified. - final boolean desireOnly = this.work.isVectorizationOnly(); - final VectorizationDetailLevel desiredVecDetailLevel = - this.work.isVectorizationDetailLevel(); - - switch (vectorization) { - case NON_VECTORIZED: - // Display all non-vectorized leaf objects unless ONLY. - if (desireOnly) { - invokeFlag = false; - } - break; - case SUMMARY: - case OPERATOR: - case EXPRESSION: - case DETAIL: - if (vectorization.rank < desiredVecDetailLevel.rank) { - // This detail not desired. - invokeFlag = false; - } - break; - case SUMMARY_PATH: - case OPERATOR_PATH: - if (desireOnly) { - if (vectorization.rank < desiredVecDetailLevel.rank) { - // Suppress headers and all objects below. - invokeFlag = false; - } - } - break; - default: - throw new RuntimeException("Unknown EXPLAIN vectorization " + vectorization); - } + invokeFlag = isInvokeVectorization(vectorization); } else { - // Do not display vectorization objects. - switch (vectorization) { - case SUMMARY: - case OPERATOR: - case EXPRESSION: - case DETAIL: - invokeFlag = false; - break; - case NON_VECTORIZED: - // No action. - break; - case SUMMARY_PATH: - case OPERATOR_PATH: - // Always include headers since they contain non-vectorized objects, too. - break; - default: - throw new RuntimeException("Unknown EXPLAIN vectorization " + vectorization); - } + invokeFlag = isInvokeNonVectorization(vectorization); } } if (invokeFlag) { @@ -892,64 +911,18 @@ public JSONObject outputPlan(Object work, PrintStream out, if (extended) { invokeFlag = Level.EXTENDED.in(xpl_note.explainLevels()); } else { - invokeFlag = Level.DEFAULT.in(xpl_note.explainLevels()); + invokeFlag = + Level.DEFAULT.in(xpl_note.explainLevels()) || + (this.work != null && this.work.isDebug() && Level.DEBUG.in(xpl_note.explainLevels())); } } if (invokeFlag) { Vectorization vectorization = xpl_note.vectorization(); - if (this.work != null && this.work.isVectorization()) { - - // The EXPLAIN VECTORIZATION option was specified. 
- final boolean desireOnly = this.work.isVectorizationOnly(); - final VectorizationDetailLevel desiredVecDetailLevel = - this.work.isVectorizationDetailLevel(); - - switch (vectorization) { - case NON_VECTORIZED: - // Display all non-vectorized leaf objects unless ONLY. - if (desireOnly) { - invokeFlag = false; - } - break; - case SUMMARY: - case OPERATOR: - case EXPRESSION: - case DETAIL: - if (vectorization.rank < desiredVecDetailLevel.rank) { - // This detail not desired. - invokeFlag = false; - } - break; - case SUMMARY_PATH: - case OPERATOR_PATH: - if (desireOnly) { - if (vectorization.rank < desiredVecDetailLevel.rank) { - // Suppress headers and all objects below. - invokeFlag = false; - } - } - break; - default: - throw new RuntimeException("Unknown EXPLAIN vectorization " + vectorization); - } - } else { - // Do not display vectorization objects. - switch (vectorization) { - case SUMMARY: - case OPERATOR: - case EXPRESSION: - case DETAIL: - invokeFlag = false; - break; - case NON_VECTORIZED: - // No action. - break; - case SUMMARY_PATH: - case OPERATOR_PATH: - // Always include headers since they contain non-vectorized objects, too. - break; - default: - throw new RuntimeException("Unknown EXPLAIN vectorization " + vectorization); + if (invokeFlag) { + if (this.work != null && this.work.isVectorization()) { + invokeFlag = isInvokeVectorization(vectorization); + } else { + invokeFlag = isInvokeNonVectorization(vectorization); } } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java index a914ce3d0e..1aae142ba7 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java @@ -140,7 +140,7 @@ public static int populateJoinKeyValue(List[] outMap, if (key == (byte) posBigTableAlias) { valueFields.add(null); } else { - valueFields.add(ExprNodeEvaluatorFactory.get(expr, conf)); + valueFields.add(expr == null ? 
null : ExprNodeEvaluatorFactory.get(expr, conf)); } } outMap[key] = valueFields; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java index f45a0123dd..114cea91d6 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java @@ -27,6 +27,7 @@ import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; +import org.apache.commons.lang.ArrayUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.conf.Constants; @@ -41,12 +42,16 @@ import org.apache.hadoop.hive.ql.exec.persistence.KeyValueContainer; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinBytesTableContainer; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinBytesTableContainer.KeyValueHelper; +import org.apache.hadoop.hive.ql.exec.persistence.AbstractRowContainer; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinKey; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectSerDeContext; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinRowContainer; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer; +import org.apache.hadoop.hive.ql.exec.persistence.ReusableGetAdaptorDirectAccess; +import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer.NonMatchedSmallTableIterator; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer.ReusableGetAdaptor; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainerSerDe; +import org.apache.hadoop.hive.ql.exec.persistence.MatchTracker; import org.apache.hadoop.hive.ql.exec.persistence.ObjectContainer; import org.apache.hadoop.hive.ql.exec.persistence.UnwrapRowContainer; import org.apache.hadoop.hive.ql.exec.spark.SparkUtilities; @@ -66,7 +71,9 @@ import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Writable; import org.apache.hive.common.util.ReflectionUtil; @@ -74,8 +81,8 @@ import org.slf4j.LoggerFactory; import com.google.common.annotations.VisibleForTesting; - import com.esotericsoftware.kryo.KryoException; +import com.google.common.base.Preconditions; /** * Map side Join operator implementation. @@ -105,6 +112,23 @@ protected HybridHashTableContainer firstSmallTable; // The first small table; // Only this table has spilled big table rows + /* + * FULL OUTER MapJoin members. + */ + protected transient boolean isFullOuterMapJoin; // Are we doing a FULL OUTER MapJoin? + + protected transient int fullOuterBigTableRetainSize; + // The number of Big Table columns being + // retained in the output result for + // FULL OUTER MapJoin. + + /* + * Small Table key match tracking used for FULL OUTER MapJoin. Otherwise, null. + * Since the Small Table hash table can be shared among vertices, we require this non-shared object + * for our vertex (i.e. operator private) key match tracking.
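+ * + * Rough lifecycle, assuming the simple two-way FULL OUTER case (illustrative only): + * + * matchTracker = ((ReusableGetAdaptorDirectAccess) adaptor).createMatchTracker(); // on the first row + * setMapJoinKeyNoNulls(adaptor, row, alias, matchTracker); // per Big Table row + * generateFullOuterSmallTableNoMatches(smallTablePos, smallTable); // closeOp emits the leftovers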
+ */ + protected transient MatchTracker matchTracker; + protected transient boolean isTestingNoHashTableLoad; // Only used in bucket map join. private transient int numBuckets = -1; @@ -177,6 +201,8 @@ protected void initializeOp(Configuration hconf) throws HiveException { hybridMapJoinLeftover = false; firstSmallTable = null; + doFullOuterMapJoinInit(); + generateMapMetaData(); isTestingNoHashTableLoad = HiveConf.getBoolVar(hconf, @@ -252,6 +278,24 @@ protected void completeInitializationOp(Object[] os) throws HiveException { } } + /* + * Do initialization for FULL OUTER MapJoin. + * + * Currently, we do not support FULL OUTER MapJoin for N-way. + */ + private void doFullOuterMapJoinInit() { + + // This will be set during the first process call or during closeOp if no rows processed. + matchTracker = null; + + isFullOuterMapJoin = (condn.length == 1 && condn[0].getType() == JoinDesc.FULL_OUTER_JOIN); + if (isFullOuterMapJoin) { + fullOuterBigTableRetainSize = conf.getRetainList().get(posBigTable).size(); + } else { + fullOuterBigTableRetainSize = 0; + } + } + @VisibleForTesting public void setTestMapJoinTableContainer(int posSmallTable, MapJoinTableContainer testMapJoinTableContainer, @@ -415,6 +459,16 @@ public void cleanUpInputFileChangedOp() throws HiveException { return dest.setFromRow(row, joinKeys[alias], joinKeysObjectInspectors[alias]); } + /* + * This variation is for FULL OUTER MapJoin. It does key match tracking only if the key has + * no NULLs. + */ + protected JoinUtil.JoinResult setMapJoinKeyNoNulls( + ReusableGetAdaptor dest, Object row, byte alias, MatchTracker matchTracker) + throws HiveException { + return dest.setFromRowNoNulls(row, joinKeys[alias], joinKeysObjectInspectors[alias], matchTracker); + } + protected MapJoinKey getRefKey(byte alias) { // We assume that since we are joining on the same key, all tables would have either // optimized or non-optimized key; hence, we can pass any key in any table as reference. @@ -437,6 +491,10 @@ public void process(Object row, int tag) throws HiveException { for (byte pos = 0; pos < order.length; pos++) { if (pos != alias) { hashMapRowGetters[pos] = mapJoinTables[pos].createGetter(refKey); + if (isFullOuterMapJoin) { + matchTracker = + ((ReusableGetAdaptorDirectAccess) hashMapRowGetters[pos]).createMatchTracker(); + } } } } @@ -463,7 +521,13 @@ public void process(Object row, int tag) throws HiveException { ReusableGetAdaptor adaptor; if (firstSetKey == null) { adaptor = firstSetKey = hashMapRowGetters[pos]; - joinResult = setMapJoinKey(firstSetKey, row, alias); + if (!isFullOuterMapJoin) { + // Normal case. + joinResult = setMapJoinKey(firstSetKey, row, alias); + } else { + // FULL OUTER MapJoin. We do not want keys with any NULLs to get tracked. + joinResult = setMapJoinKeyNoNulls(firstSetKey, row, alias, matchTracker); + } } else { // Keys for all tables are the same, so only the first has to deserialize them. adaptor = hashMapRowGetters[pos]; @@ -544,8 +608,139 @@ protected void spillBigTableRow(MapJoinTableContainer hybridHtContainer, Object bigTable.add(row); } + /* + * For FULL OUTER MapJoin, create a key match tracker on the Small Table. 
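+ * + * The getter created here is just a way to reach the underlying hash table; the tracker it + * returns is sized to the table's bucket count. This path is taken when process() never ran + * (an empty Big Table split), so every Small Table key must still come out as a non-match.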
+ */ + private void createMatchTracker(MapJoinTableContainer smallTable) { + ReusableGetAdaptor hashMapRowGetter = smallTable.createGetter(null); + matchTracker = + ((ReusableGetAdaptorDirectAccess) hashMapRowGetter).createMatchTracker(); + Preconditions.checkState(matchTracker != null); + } + + private byte findSmallTable() { + byte smallTablePos = -1; + for (byte pos = 0; pos < mapJoinTables.length; pos++) { + if (pos != conf.getPosBigTable()) { + smallTablePos = pos; + break; + } + } + Preconditions.checkState(smallTablePos != -1); + return smallTablePos; + } + + /* + * For FULL OUTER MapJoin, find the non matched Small Table keys and values and add them to the + * join output result. + */ + protected void generateFullOuterSmallTableNoMatches(byte smallTablePos, + MapJoinTableContainer substituteSmallTable) throws HiveException { + + // FUTURE: Currently, in the MapJoinOperator, we only support FULL OUTER MapJoin for + // FUTURE MapJoinBytesTableContainer. NOTE: Vectorization code will override this method. + + if (matchTracker == null) { + + // When the process method isn't called (i.e. no rows), then we need to create the + // MatchTracker here. + // + // It will indicate no matches, of course. + // + createMatchTracker(substituteSmallTable); + } + + boolean isSmallTableValuesOnly = false; + int[] smallTableValuesIndex = conf.getValueIndex(smallTablePos); + if (smallTableValuesIndex == null) { + List valuesList = conf.getRetainList().get(smallTablePos); + smallTableValuesIndex = + ArrayUtils.toPrimitive(valuesList.toArray(new Integer[0])); + isSmallTableValuesOnly = true; + } + final int smallTableValuesIndexSize = smallTableValuesIndex.length; + + // Our first output column for Small Table results is based on order. (The Big Table columns + // will all be NULL). + final int firstOutputColumnNum = (posBigTable == (byte) 0 ? fullOuterBigTableRetainSize : 0); + + /* + * Create iterator that produces each non-matched Small Table key and a ReusableRowContainer + * with the Small Table values. + */ + NonMatchedSmallTableIterator nonMatchedIterator = + substituteSmallTable.createNonMatchedSmallTableIterator(matchTracker); + int nonMatchedKeyCount = 0; + int nonMatchedValueCount = 0; + while (nonMatchedIterator.isNext()) { + List keyObjList = nonMatchedIterator.getCurrentKey(); + + MapJoinRowContainer values = nonMatchedIterator.getCurrentRows(); + AbstractRowContainer.RowIterator> iter = values.rowIter(); + for (List valueObjList = iter.first(); + valueObjList != null; + valueObjList = iter.next()) { + + // Form non-matched Small Table join result. We only fill in the Small Table columns, + // so the Big Table retained columns are NULLs from the new allocation. + + Object[] row = new Object[fullOuterBigTableRetainSize + smallTableValuesIndexSize]; + int outputColumnNum = firstOutputColumnNum; + + if (isSmallTableValuesOnly) { + for (int i = 0; i < smallTableValuesIndexSize; i++) { + row[outputColumnNum++] = valueObjList.get(smallTableValuesIndex[i]); + } + } else { + for (int i = 0; i < smallTableValuesIndexSize; i++) { + final int index = smallTableValuesIndex[i]; + + if (index >= 0) { + + // Zero and above numbers indicate a big table key is needed for + // small table result "area". + + row[outputColumnNum++] = keyObjList.get(index); + } else { + + // Negative numbers indicate a column to be (deserialize) read from the small table's + // LazyBinary value row.
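+ + // Illustrative example of the encoding: a smallTableValuesIndex of { 0, -1, -2 } emits + // key column 0, then value columns 0 and 1 (since -(-1) - 1 == 0 and -(-2) - 1 == 1).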
+ + int smallTableValueIndex = -index - 1; + + row[outputColumnNum++] = valueObjList.get(smallTableValueIndex); + } + } + } + + Object standardCopyRow = + ObjectInspectorUtils.copyToStandardObject( + row, outputObjInspector, ObjectInspectorCopyOption.WRITABLE); + + // FUTURE: Support residual filters for non-equi joins. + internalForward(standardCopyRow, outputObjInspector); + nonMatchedValueCount++; + } + + nonMatchedKeyCount++; + } + } + @Override public void closeOp(boolean abort) throws HiveException { + + if (isFullOuterMapJoin) { + + // FULL OUTER MapJoin: After matching the Big Table row keys against the Small Table, we now + // add any non matched Small Table key and values to the join output result. + + // FUTURE: Currently, we only support FULL OUTER MapJoin for single condition MapJoins. + byte smallTablePos = findSmallTable(); + generateFullOuterSmallTableNoMatches( + smallTablePos, + (MapJoinTableContainer) mapJoinTables[smallTablePos]); + } + boolean spilled = false; for (MapJoinTableContainer container : mapJoinTables) { if (container != null) { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java index 38316bf7fa..e064f34185 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java @@ -924,51 +924,18 @@ protected long getNextCntr(long cntr) { protected void forward(Object row, ObjectInspector rowInspector) throws HiveException { - forward(row, rowInspector, false); - } - - protected void forward(VectorizedRowBatch vrg, ObjectInspector rowInspector) - throws HiveException { - forward(vrg, rowInspector, true); - } - - protected void forward(Object row, ObjectInspector rowInspector, boolean isVectorized) - throws HiveException { - if (isVectorized) { - vectorForward((VectorizedRowBatch) row, rowInspector); - } else { - baseForward(row, rowInspector); - } - } - - private void vectorForward(VectorizedRowBatch vrg, ObjectInspector rowInspector) - throws HiveException { - this.runTimeNumRows += vrg.count(); + runTimeNumRows++; if (getDone()) { return; } - // Data structures to store original values - final int size = vrg.size; - final boolean selectedInUse = vrg.selectedInUse; - final boolean saveState = (selectedInUse && multiChildren); - if (saveState) { - System.arraycopy(vrg.selected, 0, selected, 0, size); - } - int childrenDone = 0; for (int i = 0; i < childOperatorsArray.length; i++) { Operator o = childOperatorsArray[i]; if (o.getDone()) { childrenDone++; } else { - o.process(vrg, childOperatorsTag[i]); - // Restore original values - vrg.size = size; - vrg.selectedInUse = selectedInUse; - if (saveState) { - System.arraycopy(selected, 0, vrg.selected, 0, size); - } + o.process(row, childOperatorsTag[i]); } } @@ -978,26 +945,49 @@ private void vectorForward(VectorizedRowBatch vrg, ObjectInspector rowInspector) } } - private void baseForward(Object row, ObjectInspector rowInspector) + /* + * Forward a VectorizedRowBatch to the children operators. 
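+ * + * With multiple children, the batch's size / selectedInUse / selected state is saved and + * restored around each child's process() call, since a child may overwrite the selection + * in place; the common single-child case skips the extra copying entirely.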
+ */ + protected void vectorForward(VectorizedRowBatch batch) throws HiveException { - this.runTimeNumRows++; + + runTimeNumRows++; if (getDone()) { return; } - int childrenDone = 0; - for (int i = 0; i < childOperatorsArray.length; i++) { - Operator o = childOperatorsArray[i]; - if (o.getDone()) { - childrenDone++; - } else { - o.process(row, childOperatorsTag[i]); - } + // Data structures to store original values + final int size = batch.size; + final boolean selectedInUse = batch.selectedInUse; + final boolean saveState = (selectedInUse && multiChildren); + if (saveState) { + System.arraycopy(batch.selected, 0, selected, 0, size); } - // if all children are done, this operator is also done - if (childrenDone != 0 && childrenDone == childOperatorsArray.length) { - setDone(true); + final int childSize = childOperatorsArray.length; + if (childSize == 1) { + childOperatorsArray[0].process(batch, childOperatorsTag[0]); + } else { + int childrenDone = 0; + for (int i = 0; i < childOperatorsArray.length; i++) { + Operator o = childOperatorsArray[i]; + if (o.getDone()) { + childrenDone++; + } else { + o.process(batch, childOperatorsTag[i]); + + // Restore original values + batch.size = size; + batch.selectedInUse = selectedInUse; + if (saveState) { + System.arraycopy(selected, 0, batch.selected, 0, size); + } + } + } + // if all children are done, this operator is also done + if (childrenDone != 0 && childrenDone == childOperatorsArray.length) { + setDone(true); + } } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java index 744cdf5d28..cd0637a971 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java @@ -122,7 +122,11 @@ public void process(Object row, int tag) throws HiveException { if (conf != null && conf.isGatherStats()) { gatherStats(row); } - forward(row, inputObjInspectors[tag], vectorized); + if (vectorized) { + vectorForward((VectorizedRowBatch) row); + } else { + forward(row, inputObjInspectors[tag]); + } } private boolean checkSetDone(Object row, int tag) { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/BytesBytesMultiHashMap.java ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/BytesBytesMultiHashMap.java index add8bda8ee..a6b0dbc0dc 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/BytesBytesMultiHashMap.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/BytesBytesMultiHashMap.java @@ -466,16 +466,18 @@ public void put(KvSource kv, int keyHashCode) throws SerDeException { * @param key Key buffer. * @param offset the offset to the key in the buffer * @param hashMapResult The object to fill in that can read the values. + * @param matchTracker Optional object for tracking key matches. * @return The state byte. */ - public byte getValueResult(byte[] key, int offset, int length, Result hashMapResult) { + public byte getValueResult(byte[] key, int offset, int length, Result hashMapResult, + MatchTracker matchTracker) { hashMapResult.forget(); WriteBuffers.Position readPos = hashMapResult.getReadPos(); // First, find first record for the key.
- long ref = findKeyRefToRead(key, offset, length, readPos); + long ref = findKeyRefToRead(key, offset, length, readPos, matchTracker); if (ref == 0) { return 0; } @@ -499,6 +501,54 @@ public void populateValue(WriteBuffers.ByteSegmentRef valueRef) { writeBuffers.populateValue(valueRef); } + /** + * Finds the next non matched Small Table key and value. Supports FULL OUTER MapJoin. + * + * @param currentSlotNum Start by specifying -1; the return index from the previous call. + * @param keyRef If the return value is not -1, a reference to the key bytes. + * @param hashMapResult If the return value is not -1, the key's values. + * @param matchTracker The object that tracks matches (non-shared). + * @return The current index of the non-matched key; or -1 if no more. + */ + public int findNextNonMatched(int currentSlotNum, WriteBuffers.ByteSegmentRef keyRef, + Result hashMapResult, MatchTracker matchTracker) { + currentSlotNum++; + + hashMapResult.forget(); + + WriteBuffers.Position readPos = hashMapResult.getReadPos(); + + while (true) { + if (currentSlotNum >= refs.length) { + + // No more. + return -1; + } + long ref = refs[currentSlotNum]; + if (ref != 0 && !matchTracker.wasMatched(currentSlotNum)) { + + // An unmatched key. + writeBuffers.setReadPoint(getFirstRecordLengthsOffset(ref, readPos), readPos); + int valueLength = (int) writeBuffers.readVLong(readPos); + int keyLength = (int) writeBuffers.readVLong(readPos); + long keyOffset = Ref.getOffset(ref) - (valueLength + keyLength); + + keyRef.reset(keyOffset, keyLength); + if (keyLength > 0) { + writeBuffers.populateValue(keyRef); + } + + boolean hasList = Ref.hasList(ref); + long offsetAfterListRecordKeyLen = hasList ? writeBuffers.getReadPoint(readPos) : 0; + + hashMapResult.set(this, Ref.getOffset(ref), hasList, offsetAfterListRecordKeyLen); + + return currentSlotNum; + } + currentSlotNum++; + } + } + /** * Number of keys in the hashmap * @return number of keys @@ -516,8 +566,12 @@ public int getNumValues() { return numValues; } + public int getNumHashBuckets() { + return refs.length; + } + /** - * Number of bytes used by the hashmap + * Number of bytes used by the hashmap. * There are two main components that take most memory: writeBuffers and refs * Others include instance fields: 100 * @return number of bytes @@ -614,7 +668,7 @@ private int findKeySlotToWrite(long keyOffset, int keyLength, int hashCode) { * @return The ref to use for reading. */ private long findKeyRefToRead(byte[] key, int offset, int length, - WriteBuffers.Position readPos) { + WriteBuffers.Position readPos, MatchTracker matchTracker) { final int bucketMask = (refs.length - 1); int hashCode = writeBuffers.hashCode(key, offset, length); int slot = hashCode & bucketMask; @@ -629,6 +683,13 @@ private long findKeyRefToRead(byte[] key, int offset, int length, return 0; } if (isSameKey(key, offset, length, ref, hashCode, readPos)) { + + if (matchTracker != null) { + + // Support for FULL OUTER MapJoin. Track matches of the slot table entry. 
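+ + // Matches are recorded per hash-table slot (rather than per key instance), which is what + // lets findNextNonMatched above scan refs[] and test wasMatched(slot) for each occupied + // slot when producing the non-matched keys.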
+ matchTracker.trackMatch(slot); + } + return ref; } ++metricGetConflict; @@ -897,7 +958,7 @@ public void debugDumpTable() { dump.append(Utils.toStringBinary(key, 0, key.length)).append(" ref [").append(dumpRef(ref)) .append("]: "); Result hashMapResult = new Result(); - getValueResult(key, 0, key.length, hashMapResult); + getValueResult(key, 0, key.length, hashMapResult, null); List results = new ArrayList(); WriteBuffers.ByteSegmentRef byteSegmentRef = hashMapResult.first(); while (byteSegmentRef != null) { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HashMapWrapper.java ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HashMapWrapper.java index 765a647275..ae84d2d6f0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HashMapWrapper.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HashMapWrapper.java @@ -32,6 +32,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; import org.apache.hadoop.hive.ql.exec.JoinUtil; +import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer.NonMatchedSmallTableIterator; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter; import org.apache.hadoop.hive.ql.exec.vector.wrapper.VectorHashKeyWrapperBase; import org.apache.hadoop.hive.ql.exec.vector.wrapper.VectorHashKeyWrapperBatch; @@ -113,6 +114,7 @@ public void put(MapJoinKey key, MapJoinRowContainer value) { public int size() { return mHash.size(); } + @Override public Set> entrySet() { return mHash.entrySet(); @@ -140,6 +142,12 @@ public ReusableGetAdaptor createGetter(MapJoinKey keyTypeFromLoader) { return new GetAdaptor(keyTypeFromLoader); } + @Override + public NonMatchedSmallTableIterator createNonMatchedSmallTableIterator( + MatchTracker matchTracker) { + throw new RuntimeException("Not applicable"); + } + @Override public long getEstimatedMemorySize() { // TODO: Key and Values are Object[] which can be eagerly deserialized or lazily deserialized. 
To accurately @@ -187,6 +195,14 @@ public GetAdaptor(MapJoinKey key) { } } + @Override + public JoinUtil.JoinResult setFromVectorNoNulls(VectorHashKeyWrapperBase kw, + VectorExpressionWriter[] keyOutputWriters, VectorHashKeyWrapperBatch keyWrapperBatch, + MatchTracker matchTracer) + throws HiveException { + throw new RuntimeException("Not supported"); + } + @Override public JoinUtil.JoinResult setFromRow(Object row, List fields, List ois) throws HiveException { @@ -207,6 +223,12 @@ public GetAdaptor(MapJoinKey key) { } } + @Override + public JoinUtil.JoinResult setFromRowNoNulls(Object row, List fields, + List ois, MatchTracker matchTracker) throws HiveException { + throw new RuntimeException("Not supported"); + } + @Override public JoinUtil.JoinResult setFromOther(ReusableGetAdaptor other) { assert other instanceof GetAdaptor; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java index 13f1702d7e..54377428ea 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java @@ -39,6 +39,7 @@ import org.apache.hadoop.hive.ql.exec.JoinUtil.JoinResult; import org.apache.hadoop.hive.ql.exec.SerializationUtilities; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinBytesTableContainer.KeyValueHelper; +import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer.NonMatchedSmallTableIterator; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter; import org.apache.hadoop.hive.ql.exec.vector.rowbytescontainer.VectorRowBytesContainer; import org.apache.hadoop.hive.ql.exec.vector.wrapper.VectorHashKeyWrapperBase; @@ -95,6 +96,9 @@ /** The OI used to deserialize values. We never deserialize keys. */ private LazyBinaryStructObjectInspector internalValueOi; + private MapJoinObjectSerDeContext keyContext; + private MapJoinObjectSerDeContext valueContext; + private AbstractSerDe keySerde; private boolean[] sortableSortOrders; private byte[] nullMarkers; private byte[] notNullMarkers; @@ -775,6 +779,12 @@ public ReusableGetAdaptor createGetter(MapJoinKey keyTypeFromLoader) { return new GetAdaptor(); } + @Override + public NonMatchedSmallTableIterator createNonMatchedSmallTableIterator( + MatchTracker matchTracker) { + throw new RuntimeException("Not applicable"); + } + @Override public void seal() { for (HashPartition hp : hashPartitions) { @@ -834,6 +844,18 @@ public GetAdaptor() { sortableSortOrders, nullMarkers, notNullMarkers)); } + /* + * This variation is for FULL OUTER MapJoin. It does key match tracking only if the key has + * no NULLs. + */ + @Override + public JoinUtil.JoinResult setFromVectorNoNulls(VectorHashKeyWrapperBase kw, + VectorExpressionWriter[] keyOutputWriters, VectorHashKeyWrapperBatch keyWrapperBatch, + MatchTracker matchTracker) + throws HiveException { + throw new RuntimeException("Not supported"); + } + @Override public JoinUtil.JoinResult setFromRow(Object row, List fields, List ois) throws HiveException { @@ -850,6 +872,16 @@ public GetAdaptor() { sortableSortOrders, nullMarkers, notNullMarkers)); } + /* + * This variation is for FULL OUTER MapJoin. It does key match tracking only if the key has + * no NULLs. 
+ */ + @Override + public JoinUtil.JoinResult setFromRowNoNulls(Object row, List fields, + List ois, MatchTracker matchTracker) throws HiveException { + throw new RuntimeException("Not supported"); + } + @Override public JoinUtil.JoinResult setFromOther(ReusableGetAdaptor other) throws HiveException { assert other instanceof GetAdaptor; @@ -884,8 +916,14 @@ public MapJoinRowContainer getCurrentRows() { @Override public JoinUtil.JoinResult setDirect(byte[] bytes, int offset, int length, - BytesBytesMultiHashMap.Result hashMapResult) { - return currentValue.setDirect(bytes, offset, length, hashMapResult); + BytesBytesMultiHashMap.Result hashMapResult, MatchTracker matchTracker) { + return currentValue.setDirect( + bytes, offset, length, hashMapResult, matchTracker); + } + + @Override + public MatchTracker createMatchTracker() { + throw new RuntimeException("Not supported"); } @Override @@ -935,6 +973,10 @@ public ReusableRowContainer() { clearRows(); } + public BytesBytesMultiHashMap.Result getHashMapResult() { + return hashMapResult; + } + /* Determine if there is a match between big table row and the corresponding hashtable * Three states can be returned: * MATCH: a match is found @@ -963,10 +1005,9 @@ public ReusableRowContainer() { toSpillPartitionId = partitionId; hashMapResult.forget(); return JoinUtil.JoinResult.SPILL; - } - else { + } else { aliasFilter = hashPartitions[partitionId].hashMap.getValueResult(output.getData(), 0, - output.getLength(), hashMapResult); + output.getLength(), hashMapResult, /* matchTracker */ null); dummyRow = null; if (hashMapResult.hasRows()) { return JoinUtil.JoinResult.MATCH; @@ -977,6 +1018,46 @@ public ReusableRowContainer() { } } + public JoinUtil.JoinResult setFromOutput(Output output, MatchTracker matchTracker) { + int keyHash = HashCodeUtil.murmurHash(output.getData(), 0, output.getLength()); + + if (bloom1 != null && !bloom1.testLong(keyHash)) { + /* + * if the keyHash is missing in the bloom filter, then the value cannot + * exist in any of the spilled partition - return NOMATCH + */ + dummyRow = null; + aliasFilter = (byte) 0xff; + hashMapResult.forget(); + return JoinResult.NOMATCH; + } + + partitionId = keyHash & (hashPartitions.length - 1); + + // If the target hash table is on disk, spill this row to disk as well to be processed later + if (isOnDisk(partitionId)) { + toSpillPartitionId = partitionId; + hashMapResult.forget(); + return JoinUtil.JoinResult.SPILL; + } else { + aliasFilter = hashPartitions[partitionId].hashMap.getValueResult( + output.getData(), 0, output.getLength(), + hashMapResult, + /* matchTracker */ null); + dummyRow = null; + if (hashMapResult.hasRows()) { + return JoinUtil.JoinResult.MATCH; + } else { + aliasFilter = (byte) 0xff; + return JoinUtil.JoinResult.NOMATCH; + } + } + } + + public void reset() { + hashMapResult.forget(); + } + @Override public boolean hasRows() { return hashMapResult.hasRows() || (dummyRow != null); @@ -1094,7 +1175,7 @@ public void write(MapJoinObjectSerDeContext valueContext, ObjectOutputStream out // Direct access. 
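+ + // Note: the hybrid container takes the new MatchTracker parameter only for signature parity: + // its createMatchTracker above throws, and setDirect below hands the partition hash map a + // null tracker, so FULL OUTER match tracking is effectively unsupported once a table may spill.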
public JoinUtil.JoinResult setDirect(byte[] bytes, int offset, int length, - BytesBytesMultiHashMap.Result hashMapResult) { + BytesBytesMultiHashMap.Result hashMapResult, MatchTracker matchTracker) { int keyHash = HashCodeUtil.murmurHash(bytes, offset, length); partitionId = keyHash & (hashPartitions.length - 1); @@ -1115,8 +1196,10 @@ public void write(MapJoinObjectSerDeContext valueContext, ObjectOutputStream out return JoinUtil.JoinResult.SPILL; } else { - aliasFilter = hashPartitions[partitionId].hashMap.getValueResult(bytes, offset, length, - hashMapResult); + aliasFilter = hashPartitions[partitionId].hashMap.getValueResult( + bytes, offset, length, + hashMapResult, + /* matchTracker */ null); dummyRow = null; if (hashMapResult.hasRows()) { return JoinUtil.JoinResult.MATCH; @@ -1168,10 +1251,27 @@ public int size() { return totalSize; } + public MapJoinObjectSerDeContext getKeyContext() { + return keyContext; + } + + public MapJoinObjectSerDeContext getValueContext() { + return valueContext; + } + @Override public void setSerde(MapJoinObjectSerDeContext keyCtx, MapJoinObjectSerDeContext valCtx) throws SerDeException { - AbstractSerDe keySerde = keyCtx.getSerDe(), valSerde = valCtx.getSerDe(); + + // Save the key and value contexts in case the MapJoinOperator needs them when creating + // a stand alone spill MapJoinBytesTableContainer. + keyContext = keyCtx; + valueContext = valCtx; + + // Save the key serde for possible use by NonMatchedSmallTableIteratorImpl. + keySerde = keyCtx.getSerDe(); + + AbstractSerDe valSerde = valCtx.getSerDe(); if (writeHelper == null) { LOG.info("Initializing container with " + keySerde.getClass().getName() + " and " diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java index b632e1de89..0e4b8df036 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; import org.apache.hadoop.hive.ql.exec.JoinUtil; +import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer.NonMatchedSmallTableIterator; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter; import org.apache.hadoop.hive.ql.exec.vector.wrapper.VectorHashKeyWrapperBase; import org.apache.hadoop.hive.ql.exec.vector.wrapper.VectorHashKeyWrapperBatch; @@ -40,6 +41,7 @@ import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.WriteBuffers; +import org.apache.hadoop.hive.serde2.WriteBuffers.ByteSegmentRef; import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe; import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; @@ -65,6 +67,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.google.common.base.Preconditions; + /** * Table container that serializes keys and values using LazyBinarySerDe into * BytesBytesMultiHashMap, with very low memory overhead. However, @@ -88,6 +92,7 @@ * compare the large table keys correctly when we do, we need to serialize them with correct * ordering. Hence, remember the ordering here; it is null if we do use LazyBinarySerDe. 
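+ * + * keySerde is additionally retained here (see setSerde below) so that the FULL OUTER + * non-matched-key iterator can deserialize the raw key bytes back into standard objects + * via MapJoinKey.deserializeRow.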
*/ + private AbstractSerDe keySerde; private boolean[] sortableSortOrders; private byte[] nullMarkers; private byte[] notNullMarkers; @@ -336,9 +341,17 @@ public void setKeyValue(Writable key, Writable val) { @Override public byte updateStateByte(Byte previousValue) { - if (filterGetter == null) return (byte)0xff; + if (!hasTag || filterGetter == null) { + return (byte) 0xff; + } byte aliasFilter = (previousValue == null) ? (byte)0xff : previousValue.byteValue(); - filterGetter.init((BinaryComparable)value); + BinaryComparable binaryComparableValue = (BinaryComparable) value; + if (binaryComparableValue.getLength() == 0) { + + // Skip empty values just like MapJoinEagerRowContainer.read does. + return (byte) 0xff; + } + filterGetter.init(binaryComparableValue); aliasFilter &= filterGetter.getShort(); return aliasFilter; } @@ -407,7 +420,8 @@ public long getEstimatedMemorySize() { @Override public void setSerde(MapJoinObjectSerDeContext keyContext, MapJoinObjectSerDeContext valueContext) throws SerDeException { - AbstractSerDe keySerde = keyContext.getSerDe(), valSerde = valueContext.getSerDe(); + keySerde = keyContext.getSerDe(); + AbstractSerDe valSerde = valueContext.getSerDe(); if (writeHelper == null) { LOG.info("Initializing container with " + keySerde.getClass().getName() + " and " + valSerde.getClass().getName()); @@ -455,6 +469,12 @@ public ReusableGetAdaptor createGetter(MapJoinKey keyTypeFromLoader) { return new GetAdaptor(); } + @Override + public NonMatchedSmallTableIterator createNonMatchedSmallTableIterator( + MatchTracker matchTracker) { + return new NonMatchedSmallTableIteratorImpl(matchTracker); + } + @Override public void seal() { hashMap.seal(); @@ -541,6 +561,44 @@ public GetAdaptor() { sortableSortOrders, nullMarkers, notNullMarkers)); } + /* + * This variation is for FULL OUTER MapJoin. It does key match tracking only if the key has + * no NULLs. + */ + @Override + public JoinUtil.JoinResult setFromVectorNoNulls(VectorHashKeyWrapperBase kw, + VectorExpressionWriter[] keyOutputWriters, VectorHashKeyWrapperBatch keyWrapperBatch, + MatchTracker matchTracker) + throws HiveException { + if (nulls == null) { + nulls = new boolean[keyOutputWriters.length]; + currentKey = new Object[keyOutputWriters.length]; + vectorKeyOIs = new ArrayList(); + for (int i = 0; i < keyOutputWriters.length; i++) { + vectorKeyOIs.add(keyOutputWriters[i].getObjectInspector()); + } + } else { + assert nulls.length == keyOutputWriters.length; + } + boolean hasNulls = false; + for (int i = 0; i < keyOutputWriters.length; i++) { + currentKey[i] = keyWrapperBatch.getWritableKeyValue(kw, i, keyOutputWriters[i]); + if (currentKey[i] == null) { + nulls[i] = true; + hasNulls = true; + } else { + nulls[i] = false; + } + } + if (hasNulls) { + currentValue.reset(); + return JoinUtil.JoinResult.NOMATCH; + } + return currentValue.setFromOutput( + MapJoinKey.serializeRow(output, currentKey, vectorKeyOIs, + sortableSortOrders, nullMarkers, notNullMarkers), matchTracker); + } + @Override public JoinUtil.JoinResult setFromRow(Object row, List fields, List ois) throws HiveException { @@ -557,6 +615,36 @@ public GetAdaptor() { sortableSortOrders, nullMarkers, notNullMarkers)); } + /* + * This variation is for FULL OUTER MapJoin. It does key match tracking only if the key has + * no NULLs. 
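+ * + * The no-NULLs restriction mirrors SQL equality semantics: in, say, + * SELECT * FROM big FULL OUTER JOIN small ON big.k = small.k + * a Big Table row whose key is NULL can never equal any Small Table key, so it is answered + * with NOMATCH directly and must not mark any Small Table hash-table slot as matched.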
+ */ + @Override + public JoinUtil.JoinResult setFromRowNoNulls(Object row, List fields, + List ois, MatchTracker matchTracker) throws HiveException { + if (nulls == null) { + nulls = new boolean[fields.size()]; + currentKey = new Object[fields.size()]; + } + boolean hasNulls = false; + for (int keyIndex = 0; keyIndex < fields.size(); ++keyIndex) { + currentKey[keyIndex] = fields.get(keyIndex).evaluate(row); + if (currentKey[keyIndex] == null) { + nulls[keyIndex] = true; + hasNulls = true; + } else { + nulls[keyIndex] = false; + } + } + if (hasNulls) { + currentValue.reset(); + return JoinUtil.JoinResult.NOMATCH; + } + return currentValue.setFromOutput( + MapJoinKey.serializeRow(output, currentKey, ois, + sortableSortOrders, nullMarkers, notNullMarkers), matchTracker); + } + @Override public JoinUtil.JoinResult setFromOther(ReusableGetAdaptor other) { assert other instanceof GetAdaptor; @@ -591,8 +679,14 @@ public MapJoinRowContainer getCurrentRows() { @Override public JoinUtil.JoinResult setDirect(byte[] bytes, int offset, int length, - BytesBytesMultiHashMap.Result hashMapResult) { - return currentValue.setDirect(bytes, offset, length, hashMapResult); + BytesBytesMultiHashMap.Result hashMapResult, MatchTracker matchTracker) { + return currentValue.setDirect( + bytes, offset, length, hashMapResult, matchTracker); + } + + @Override + public MatchTracker createMatchTracker() { + return MatchTracker.create(hashMap.getNumHashBuckets()); } @Override @@ -619,6 +713,7 @@ public int directSpillPartitionId() { private final LazyBinaryStruct valueStruct; private final boolean needsComplexObjectFixup; private final ArrayList complexObjectArrayBuffer; + private final WriteBuffers.Position noResultReadPos; public ReusableRowContainer() { if (internalValueOi != null) { @@ -639,13 +734,18 @@ public ReusableRowContainer() { } uselessIndirection = new ByteArrayRef(); hashMapResult = new BytesBytesMultiHashMap.Result(); + noResultReadPos = new WriteBuffers.Position(); clearRows(); } + public BytesBytesMultiHashMap.Result getHashMapResult() { + return hashMapResult; + } + public JoinUtil.JoinResult setFromOutput(Output output) { aliasFilter = hashMap.getValueResult( - output.getData(), 0, output.getLength(), hashMapResult); + output.getData(), 0, output.getLength(), hashMapResult, /* matchTracker */ null); dummyRow = null; if (hashMapResult.hasRows()) { return JoinUtil.JoinResult.MATCH; @@ -653,8 +753,24 @@ public ReusableRowContainer() { aliasFilter = (byte) 0xff; return JoinUtil.JoinResult.NOMATCH; } + } - } + public JoinUtil.JoinResult setFromOutput(Output output, MatchTracker matchTracker) { + + aliasFilter = hashMap.getValueResult( + output.getData(), 0, output.getLength(), hashMapResult, matchTracker); + dummyRow = null; + if (hashMapResult.hasRows()) { + return JoinUtil.JoinResult.MATCH; + } else { + aliasFilter = (byte) 0xff; + return JoinUtil.JoinResult.NOMATCH; + } + } + + public void reset() { + hashMapResult.forget(); + } @Override public boolean hasRows() { @@ -773,8 +889,8 @@ public void write(MapJoinObjectSerDeContext valueContext, ObjectOutputStream out // Direct access. 
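// ============================================================================
// Editor's sketch (illustrative, not part of the patch): the shape shared by
// the *NoNulls adaptor methods above. A key containing any SQL NULL can never
// equal a small-table key, so the probe returns NOMATCH without consulting the
// hash map and, importantly, without marking any slot in the MatchTracker.
// Types here are simplified stand-ins for the real interfaces.
// ============================================================================
import java.util.List;

final class NoNullsProbeSketch {
  enum JoinResult { MATCH, NOMATCH, SPILL }

  interface Probe {
    // Stand-in for BytesBytesMultiHashMap.getValueResult(..., matchTracker).
    JoinResult lookup(Object[] key, Object matchTracker);
  }

  static JoinResult setFromRowNoNulls(List<Object> keyFields, Object matchTracker, Probe hashMap) {
    Object[] key = new Object[keyFields.size()];
    for (int i = 0; i < key.length; i++) {
      key[i] = keyFields.get(i);
      if (key[i] == null) {
        // NULL keys never join: skip the lookup so no slot gets match-tracked.
        return JoinResult.NOMATCH;
      }
    }
    return hashMap.lookup(key, matchTracker);
  }
}
// ============================================================================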
public JoinUtil.JoinResult setDirect(byte[] bytes, int offset, int length, - BytesBytesMultiHashMap.Result hashMapResult) { - aliasFilter = hashMap.getValueResult(bytes, offset, length, hashMapResult); + BytesBytesMultiHashMap.Result hashMapResult, MatchTracker matchTracker) { + aliasFilter = hashMap.getValueResult(bytes, offset, length, hashMapResult, matchTracker); dummyRow = null; if (hashMapResult.hasRows()) { return JoinUtil.JoinResult.MATCH; @@ -785,6 +901,71 @@ public void write(MapJoinObjectSerDeContext valueContext, ObjectOutputStream out } } + /** + * For FULL OUTER MapJoin: Iterates through the Small Table hash table and returns the key and + * value rows for any non-matched keys. + */ + private class NonMatchedSmallTableIteratorImpl implements NonMatchedSmallTableIterator { + + private final MatchTracker matchTracker; + + private int currentIndex; + + private final WriteBuffers.ByteSegmentRef keyRef; + private final BytesWritable bytesWritable; + private final ReusableRowContainer currentValue; + + NonMatchedSmallTableIteratorImpl(MatchTracker matchTracker) { + this.matchTracker = matchTracker; + + Preconditions.checkState(keySerde != null); + + currentIndex = -1; + + keyRef = new WriteBuffers.ByteSegmentRef(); + bytesWritable = new BytesWritable(); + + currentValue = new ReusableRowContainer(); + } + + @Override + public boolean isNext() { + + // If another non-matched key is found, the key bytes will be referenced by keyRef, and + // our ReusableRowContainer's BytesBytesMultiHashMap.Result will reference the value rows. + currentIndex = + hashMap.findNextNonMatched( + currentIndex, keyRef, currentValue.getHashMapResult(), matchTracker); + return (currentIndex != -1); + } + + @Override + public List getCurrentKey() throws HiveException { + List deserializedList = + MapJoinKey.deserializeRow( + keyRef.getBytes(), + (int) keyRef.getOffset(), + keyRef.getLength(), + bytesWritable, keySerde); + return deserializedList; + } + + @Override + public ByteSegmentRef getCurrentKeyAsRef() { + return keyRef; + } + + @Override + public MapJoinRowContainer getCurrentRows() { + return currentValue; + } + + @Override + public BytesBytesMultiHashMap.Result getHashMapResult() { + return currentValue.getHashMapResult(); + } + } + public static boolean isSupportedKey(ObjectInspector keyOi) { List keyFields = ((StructObjectInspector)keyOi).getAllStructFieldRefs(); for (StructField field : keyFields) { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java index 2e3716c8b1..c44315f3d9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java @@ -40,6 +40,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; +import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Writable; /** @@ -171,4 +172,18 @@ public static Output serializeRow(Output byteStream, Object[] fieldData, } return byteStream; } + + /* + * Deserializes a key. Needed for FULL OUTER MapJoin to unpack the Small Table key when + * adding the non-matched key to the join output result.
+ */ + public static List deserializeRow(byte[] keyBytes, int keyOffset, int keyLength, + BytesWritable bytesWritable, AbstractSerDe serde) throws HiveException { + try { + bytesWritable.set(keyBytes, keyOffset, keyLength); + return (List) serde.deserialize(bytesWritable); + } catch (SerDeException e) { + throw new HiveException("Deserialization error", e); + } + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinObjectSerDeContext.java ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinObjectSerDeContext.java index 345d1f4fa2..1d7aec8170 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinObjectSerDeContext.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinObjectSerDeContext.java @@ -17,11 +17,16 @@ */ package org.apache.hadoop.hive.ql.exec.persistence; +import java.util.ArrayList; +import java.util.List; + import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption; +import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.StructField; @SuppressWarnings("deprecation") public class MapJoinObjectSerDeContext { @@ -55,6 +60,18 @@ public boolean hasFilterTag() { return hasFilter; } + public String stringify() { + StandardStructObjectInspector standardStructOI = (StandardStructObjectInspector) standardOI; + List structFields = standardStructOI.getAllStructFieldRefs(); + List typeInfoStrings = new ArrayList(); + for (StructField field : structFields) { + ObjectInspector fieldOI = field.getFieldObjectInspector(); + typeInfoStrings.add(fieldOI.getTypeName()); + } + return "[types " + typeInfoStrings.toString() + ", serde=" + serde.getClass().getName() + + ", hasFilter=" + hasFilter + "]"; + } + @Override public String toString() { return "MapJoinObjectSerDeContext [standardOI=" + standardOI + ", serde=" + serde diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainer.java ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainer.java index 2c4229f23b..74e0b120ea 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainer.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainer.java @@ -29,6 +29,7 @@ import org.apache.hadoop.hive.ql.exec.vector.wrapper.VectorHashKeyWrapperBatch; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.SerDeException; +import org.apache.hadoop.hive.serde2.WriteBuffers.ByteSegmentRef; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.io.Writable; @@ -43,9 +44,17 @@ * Changes current rows to which adaptor is referring to the rows corresponding to * the key represented by a VHKW object, and writers and batch used to interpret it. */ + JoinUtil.JoinResult setFromVector(VectorHashKeyWrapperBase kw, VectorExpressionWriter[] keyOutputWriters, VectorHashKeyWrapperBatch keyWrapperBatch) throws HiveException; + /* + * This variation is for FULL OUTER MapJoin. It does key match tracking only if the key has + * no NULLs.
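// ============================================================================
// Editor's sketch (illustrative, not part of the patch): driving the new
// MapJoinKey.deserializeRow above. The caller supplies a reusable
// BytesWritable so there is no per-key allocation, and the serde is the key
// serde the container was loaded with (as saved by setSerde). The wrapper
// class and method names below are hypothetical.
// ============================================================================
import java.util.List;
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinKey;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.WriteBuffers;
import org.apache.hadoop.io.BytesWritable;

final class DeserializeRowSketch {
  // Reused across calls, mirroring NonMatchedSmallTableIteratorImpl.
  private final BytesWritable reusableBytesWritable = new BytesWritable();

  List keyOf(WriteBuffers.ByteSegmentRef keyRef, AbstractSerDe keySerde) throws HiveException {
    // keyRef points at serialized key bytes inside the hash table's WriteBuffers.
    return MapJoinKey.deserializeRow(
        keyRef.getBytes(), (int) keyRef.getOffset(), keyRef.getLength(),
        reusableBytesWritable, keySerde);
  }
}
// ============================================================================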
+ */ + JoinUtil.JoinResult setFromVectorNoNulls(VectorHashKeyWrapperBase kw, VectorExpressionWriter[] keyOutputWriters, + VectorHashKeyWrapperBatch keyWrapperBatch, MatchTracker matchTracker) throws HiveException; + /** * Changes current rows to which adaptor is referring to the rows corresponding to * the key represented by a row object, and fields and ois used to interpret it. @@ -53,6 +62,14 @@ JoinUtil.JoinResult setFromRow(Object row, List fields, List ois) throws HiveException; + /* + * This variation is for FULL OUTER MapJoin. It does key match tracking only if the key has + * no NULLs. + */ + JoinUtil.JoinResult setFromRowNoNulls(Object row, List fields, + List ois, MatchTracker matchTracker) + throws HiveException; + /** * Changes current rows to which adaptor is referring to the rows corresponding to * the key that another adaptor has already deserialized via setFromVector/setFromRow. @@ -81,6 +98,42 @@ MapJoinKey putRow(Writable currentKey, Writable currentValue) throws SerDeException, HiveException, IOException; + /** + * For FULL OUTER MapJoin: Iterates through the Small Table hash table and returns the key and + * value rows for any non-matched keys. + */ + public interface NonMatchedSmallTableIterator { + + /** + * Return true if another non-matched key was found. + */ + boolean isNext(); + + /** + * @return The current key as a deserialized object array after a successful isNext() call + * that returns true. + * @throws HiveException + */ + List getCurrentKey() throws HiveException; + + /** + * @return The current key as a WriteBuffers.ByteSegmentRef after a successful isNext() call + * that returns true. + */ + ByteSegmentRef getCurrentKeyAsRef(); + + /** + * @return The container with the value rows for the current key after a successful isNext() + * call that returns true. + */ + MapJoinRowContainer getCurrentRows(); + + /** + * @return The value rows as a BytesBytesMultiHashMap.Result. + */ + BytesBytesMultiHashMap.Result getHashMapResult(); + } + /** * Indicates to the container that the puts have ended; table is now r/o. */ @@ -94,6 +147,12 @@ MapJoinKey putRow(Writable currentKey, Writable currentValue) */ ReusableGetAdaptor createGetter(MapJoinKey keyTypeFromLoader); + /** + * Creates an iterator that goes through the hash table and returns the key and value rows for + * any non-matched keys. Supports FULL OUTER MapJoin. + */ + NonMatchedSmallTableIterator createNonMatchedSmallTableIterator(MatchTracker matchTracker); + /** Clears the contents of the table. */ void clear(); @@ -108,7 +167,7 @@ MapJoinKey putRow(Writable currentKey, Writable currentValue) boolean hasSpill(); /** - * Return the size of the hash table + * Return the size of the hash table. */ int size(); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MatchTracker.java ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MatchTracker.java new file mode 100644 index 0000000000..305474b693 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MatchTracker.java @@ -0,0 +1,154 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.exec.persistence; + +import org.apache.hadoop.hive.ql.util.JavaDataModel; + +import com.google.common.base.Preconditions; + +/** + * Record which hash table slot entries had key matches for FULL OUTER MapJoin. + * Supports partitioned match trackers for HybridHashTableContainer. + */ +public final class MatchTracker { + + /* + * Regular case: + * isPartitioned = false + * The longMatchFlags array: one bit per hash table slot entry. + * If this tracker is underneath a partitioned tracker, then partitionParent is set. + * + * Partitioned case: + * isPartitioned = true + * The partitions array: a tracker for the currently active partitions. + */ + private final boolean isPartitioned; + private final MatchTracker partitionParent; + private final long[] longMatchFlags; + private final MatchTracker[] partitions; + + private MatchTracker(boolean isPartitioned, MatchTracker partitionParent, int count) { + this.isPartitioned = isPartitioned; + this.partitionParent = partitionParent; + if (!isPartitioned) { + final int longMatchFlagsSize = (count + Long.SIZE - 1) / Long.SIZE; + longMatchFlags = new long[longMatchFlagsSize]; + partitions = null; + } else { + longMatchFlags = null; + partitions = new MatchTracker[count]; + } + } + + /* + * Create a regular tracker. + */ + public static MatchTracker create(int logicalHashBucketCount) { + return new MatchTracker(false, null, logicalHashBucketCount); + } + + /* + * Create a partitioned tracker. Use addPartition and clearPartition to maintain the currently + * active partition trackers. + */ + public static MatchTracker createPartitioned(int partitionCount) { + return new MatchTracker(true, null, partitionCount); + } + + public boolean getIsPartitioned() { + return isPartitioned; + } + + public void addPartition(int partitionId, int logicalHashBucketCount) { + partitions[partitionId] = new MatchTracker(false, this, logicalHashBucketCount); + } + + public void clearPartition(int partitionId) { + partitions[partitionId] = null; + } + + public MatchTracker getPartition(int partitionId) { + return partitions[partitionId]; + } + + private boolean isFirstMatch; + + public boolean getIsFirstMatch() { + return isFirstMatch; + } + + /* + * Track a regular hash table slot match. + * If this tracker is underneath a partitioned tracker, the partitioned tracker's first-match + * flag will be updated. + */ + public void trackMatch(int logicalSlotNum) { + + Preconditions.checkState(!isPartitioned); + + final int longWordIndex = logicalSlotNum / Long.SIZE; + final long longBitMask = 1L << (logicalSlotNum % Long.SIZE); + if ((longMatchFlags[longWordIndex] & longBitMask) != 0) { + + // Flag is already on. + isFirstMatch = false; + } else { + longMatchFlags[longWordIndex] |= longBitMask; + isFirstMatch = true; + } + if (partitionParent != null) { + + // Push match flag up. + partitionParent.isFirstMatch = isFirstMatch; + } + } + + /* + * Track a partitioned hash table slot match. 
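// ============================================================================
// Editor's sketch (illustrative, not part of the patch): the bit arithmetic
// MatchTracker uses. One flag bit per logical hash table slot, packed 64 per
// long word; trackMatch sets the bit and wasMatched tests it. This standalone
// copy exists only to make the word/bit indexing easy to verify.
// ============================================================================
final class BitFlagSketch {
  private final long[] flags;

  BitFlagSketch(int slotCount) {
    // Same sizing as MatchTracker: ceil(slotCount / 64) long words.
    flags = new long[(slotCount + Long.SIZE - 1) / Long.SIZE];
  }

  void track(int slot) {
    flags[slot / Long.SIZE] |= 1L << (slot % Long.SIZE);
  }

  boolean wasMatched(int slot) {
    return (flags[slot / Long.SIZE] & (1L << (slot % Long.SIZE))) != 0;
  }

  public static void main(String[] args) {
    BitFlagSketch t = new BitFlagSketch(1 << 20);
    t.track(64);                          // first bit of the second word
    System.out.println(t.wasMatched(64)); // true
    System.out.println(t.wasMatched(65)); // false
  }
}
// ============================================================================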
+ */ + public void trackPartitionMatch(int partitionId, int logicalSlotNum) { + partitions[partitionId].trackMatch(logicalSlotNum); + } + + /* + * Was a regular hash table slot matched? + */ + public boolean wasMatched(int logicalSlotNum) { + final int longWordIndex = logicalSlotNum / Long.SIZE; + final long longBitMask = 1L << (logicalSlotNum % Long.SIZE); + return (longMatchFlags[longWordIndex] & longBitMask) != 0; + } + + /* + * Was a partitioned hash table slot matched? + */ + public boolean wasPartitionMatched(int partitionId, int logicalSlotNum) { + return partitions[partitionId].wasMatched(logicalSlotNum); + } + + public static int calculateEstimatedMemorySize(int count) { + // FUTURE: Partitioning not included yet. + final int longMatchFlagsSize = (count + Long.SIZE - 1) / Long.SIZE; + int size = 0; + JavaDataModel jdm = JavaDataModel.get(); + size += jdm.lengthForLongArrayOfSize(longMatchFlagsSize); + size += jdm.primitive1(); + size += (2 * jdm.object()); + return size; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ReusableGetAdaptorDirectAccess.java ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ReusableGetAdaptorDirectAccess.java index 3303cc4edc..c54335853b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ReusableGetAdaptorDirectAccess.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ReusableGetAdaptorDirectAccess.java @@ -24,7 +24,9 @@ public interface ReusableGetAdaptorDirectAccess { JoinResult setDirect(byte[] bytes, int offset, int length, - BytesBytesMultiHashMap.Result hashMapResult); + BytesBytesMultiHashMap.Result hashMapResult, MatchTracker matchTracker); int directSpillPartitionId(); + + MatchTracker createMatchTracker(); } \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/UnwrapRowContainer.java ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/UnwrapRowContainer.java index 95400c808e..0ff54ff43d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/UnwrapRowContainer.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/UnwrapRowContainer.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.SerDeException; +import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter; /** @@ -70,6 +71,8 @@ public MapJoinRowContainer setInternal(MapJoinRowContainer internal, Object[] cu return unwrap(iterator.next()); } + private static final ShortWritable ALL_ALIAS_FILTER_SHORT_WRITABLE = new ShortWritable((byte) 0xff); + private List unwrap(List values) { if (values == null) { return null; @@ -90,7 +93,14 @@ public MapJoinRowContainer setInternal(MapJoinRowContainer internal, Object[] cu } } if (tagged) { - unwrapped.add(values.get(values.size() - 1)); // append filter tag + + // Append filter tag. 
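// ============================================================================
// Editor's sketch (illustrative, not part of the patch): how a hybrid grace
// container could drive the partitioned MatchTracker API above. The partition
// and bucket counts are made up for the example.
// ============================================================================
import org.apache.hadoop.hive.ql.exec.persistence.MatchTracker;

final class PartitionedTrackerSketch {
  public static void main(String[] args) {
    MatchTracker parent = MatchTracker.createPartitioned(4);
    parent.addPartition(0, 1024);              // partition 0 active, 1024 slots
    parent.trackPartitionMatch(0, 17);         // mark slot 17 of partition 0
    System.out.println(parent.wasPartitionMatched(0, 17)); // true
    System.out.println(parent.getIsFirstMatch());          // true, pushed up by the child
    parent.clearPartition(0);                  // e.g. when the partition is unloaded
  }
}
// ============================================================================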
+ final int size = values.size(); + if (size == 0) { + unwrapped.add(ALL_ALIAS_FILTER_SHORT_WRITABLE); + } else { + unwrapped.add(values.get(size - 1)); + } } return unwrapped; } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java index 2cccb448a7..152dc98215 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java @@ -262,7 +262,7 @@ public DynamicValueRegistryTez call() { e.getMessage()); throw (InterruptedException) e; } else { - throw new RuntimeException("Reduce operator initialization failed", e); + throw new RuntimeException(redWork.getName() + " operator initialization failed", e); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAppMasterEventOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAppMasterEventOperator.java index c4503ad968..f2400b8292 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAppMasterEventOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAppMasterEventOperator.java @@ -136,7 +136,7 @@ public void process(Object data, int tag) throws HiveException { throw new HiveException(e); } - forward(data, rowInspector, true); + forward(data, rowInspector); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java index fc675c5dff..19a3eedcb5 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -995,6 +996,17 @@ public void assignRow(VectorizedRowBatch batch, int batchIndex, Object[] objects } } + public void assignRow(VectorizedRowBatch batch, int batchIndex, ArrayList objectList) { + final int count = isConvert.length; + for (int i = 0; i < count; i++) { + if (isConvert[i]) { + assignConvertRowColumn(batch, batchIndex, i, objectList.get(i)); + } else { + assignRowColumn(batch, batchIndex, i, objectList.get(i)); + } + } + } + /* * Assign a row from a list of standard objects up to a count */ diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java index bedc12adc8..0cf8491006 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java @@ -259,14 +259,27 @@ void copy(VectorizedRowBatch inBatch, int inBatchIndex, VectorizedRowBatch outBa private CopyRow[] subRowToBatchCopiersByReference; public void init(VectorColumnMapping columnMapping) throws HiveException { - int count = columnMapping.getCount(); + init( + columnMapping.getInputColumns(), + columnMapping.getOutputColumns(), + columnMapping.getTypeInfos()); + } + + public void init(int[] columnMap, TypeInfo[] typeInfos) throws HiveException { + init(columnMap, columnMap, typeInfos); + } + + public void init(int[] inputColumnMap, int[] outputColumnMap, TypeInfo[] typeInfos) + throws HiveException { + + final int count = inputColumnMap.length; subRowToBatchCopiersByValue = new CopyRow[count]; subRowToBatchCopiersByReference = new CopyRow[count]; for (int i = 0; i < count; i++) { - int inputColumn = 
columnMapping.getInputColumns()[i]; - int outputColumn = columnMapping.getOutputColumns()[i]; - TypeInfo typeInfo = columnMapping.getTypeInfos()[i]; + int inputColumn = inputColumnMap[i]; + int outputColumn = outputColumnMap[i]; + TypeInfo typeInfo = typeInfos[i]; Type columnVectorType = VectorizationContext.getColumnVectorTypeFromTypeInfo(typeInfo); CopyRow copyRowByValue = null; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java index 55f355620f..97166ec4db 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java @@ -444,6 +444,38 @@ public void init(boolean[] columnsToIncludeTruncated) throws HiveException { } + public void init(int[] outputColumns, boolean[] columnsToInclude) throws HiveException { + + Preconditions.checkState( + outputColumns.length == columnsToInclude.length); + + final int columnCount = sourceTypeInfos.length; + allocateArrays(columnCount); + + int includedCount = 0; + final int[] includedIndices = new int[columnCount]; + + for (int i = 0; i < columnCount; i++) { + + if (!columnsToInclude[i]) { + + // Field not included in query. + + } else { + + initTopLevelField(i, outputColumns[i], sourceTypeInfos[i], dataTypePhysicalVariations[i]); + includedIndices[includedCount++] = i; + } + } + + // Optimizing for readField? + if (includedCount < columnCount && deserializeRead.isReadFieldSupported()) { + useReadField = true; + readFieldLogicalIndices = Arrays.copyOf(includedIndices, includedCount); + } + + } + /** * Initialize for converting the source data type that are going to be read with the * DeserializedRead interface passed to the constructor to the target data types desired in diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFilterOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFilterOperator.java index 14ac8ee159..73965ad226 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFilterOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFilterOperator.java @@ -133,7 +133,7 @@ public void process(Object row, int tag) throws HiveException { // All are selected, do nothing } if (vrg.size > 0) { - forward(vrg, null, true); + vectorForward(vrg); } // Restore the original selected vector diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java index 7816cbbf15..a516d6061e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java @@ -1173,7 +1173,7 @@ private void writeGroupRow(VectorAggregationBufferRow agg, DataOutputBuffer buff } private void flushOutput() throws HiveException { - forward(outputBatch, null, true); + vectorForward(outputBatch); outputBatch.reset(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorLimitOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorLimitOperator.java index 051d338c9a..7edb0592d1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorLimitOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorLimitOperator.java @@ -88,7 +88,7 @@ public void process(Object row, int tag) throws HiveException { batch.selected[i] = batch.selected[skipSize + i]; } } - forward(row, 
inputObjInspectors[tag], true); + vectorForward(batch); currCount += batch.size; } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinBaseOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinBaseOperator.java index 497b12dbca..e80a3e20e3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinBaseOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinBaseOperator.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; @@ -132,7 +133,8 @@ public VectorizationContext getInputVectorizationContext() { int[] smallTableIndices; int smallTableIndicesSize; List smallTableExprs = desc.getExprs().get(posSingleVectorMapJoinSmallTable); - if (desc.getValueIndices() != null && desc.getValueIndices().get(posSingleVectorMapJoinSmallTable) != null) { + if (desc.getValueIndices() != null && + desc.getValueIndices().get(posSingleVectorMapJoinSmallTable) != null) { smallTableIndices = desc.getValueIndices().get(posSingleVectorMapJoinSmallTable); smallTableIndicesSize = smallTableIndices.length; } else { @@ -141,7 +143,8 @@ public VectorizationContext getInputVectorizationContext() { } List smallTableRetainList = desc.getRetainList().get(posSingleVectorMapJoinSmallTable); - final int smallTableRetainSize = smallTableRetainList.size(); + final int smallTableRetainSize = + (smallTableRetainList != null ? smallTableRetainList.size() : 0); int smallTableResultSize = 0; if (smallTableIndicesSize > 0) { @@ -216,6 +219,7 @@ public VectorizationContext getInputVectorizationContext() { return outputTypeInfos; } + @Override public void initializeOp(Configuration hconf) throws HiveException { super.initializeOp(hconf); @@ -234,7 +238,6 @@ public void initializeOp(Configuration hconf) throws HiveException { */ @Override protected void internalForward(Object row, ObjectInspector outputOI) throws HiveException { - Object[] values = (Object[]) row; VectorAssignRow va = outputVectorAssignRowMap.get(outputOI); if (va == null) { va = new VectorAssignRow(); @@ -242,7 +245,11 @@ protected void internalForward(Object row, ObjectInspector outputOI) throws Hive outputVectorAssignRowMap.put(outputOI, va); } - va.assignRow(outputBatch, outputBatch.size, values); + if (row instanceof ArrayList) { + va.assignRow(outputBatch, outputBatch.size, (ArrayList) row); + } else { + va.assignRow(outputBatch, outputBatch.size, (Object[]) row); + } ++outputBatch.size; if (outputBatch.size == VectorizedRowBatch.DEFAULT_SIZE) { @@ -251,7 +258,7 @@ protected void internalForward(Object row, ObjectInspector outputOI) throws Hive } private void flushOutput() throws HiveException { - forward(outputBatch, null, true); + vectorForward(outputBatch); outputBatch.reset(); } @@ -263,8 +270,10 @@ public void closeOp(boolean aborted) throws HiveException { tableContainer.dumpMetrics(); } } - if (!aborted && 0 < outputBatch.size) { - flushOutput(); + if (!aborted) { + if (outputBatch.size > 0) { + flushOutput(); + } } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java index 2d8e1d7cf9..11ea6f88f1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hive.ql.exec.JoinUtil; import 
org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer.ReusableGetAdaptor; +import org.apache.hadoop.hive.ql.exec.persistence.MatchTracker; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory; @@ -183,11 +184,6 @@ protected Object _evaluate(Object row, int version) throws HiveException { } // Now replace the old evaluators with our own joinValues[posBigTable] = vectorNodeEvaluators; - - // Filtering is handled in the input batch processing - if (filterMaps != null) { - filterMaps[posBigTable] = null; - } } @Override @@ -196,6 +192,18 @@ protected Object _evaluate(Object row, int version) throws HiveException { return dest.setFromVector(keyValues[batchIndex], keyOutputWriters, keyWrapperBatch); } + /* + * This variation is for FULL OUTER MapJoin. It does key match tracking only if the key has + * no NULLs. + */ + @Override + protected JoinUtil.JoinResult setMapJoinKeyNoNulls(ReusableGetAdaptor dest, Object row, byte alias, + MatchTracker matchTracker) + throws HiveException { + return dest.setFromVectorNoNulls(keyValues[batchIndex], keyOutputWriters, keyWrapperBatch, + matchTracker); + } + @Override public void process(Object row, int tag) throws HiveException { @@ -241,6 +249,11 @@ public void process(Object row, int tag) throws HiveException { keyValues = null; } + @Override + public void closeOp(boolean aborted) throws HiveException { + super.closeOp(aborted); + } + @Override protected void spillBigTableRow(MapJoinTableContainer hybridHtContainer, Object row) throws HiveException { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java index 07a6e9d3aa..267d0dc5a1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java @@ -326,7 +326,7 @@ protected void internalForward(Object row, ObjectInspector outputOI) throws Hive } private void flushOutput() throws HiveException { - forward(outputBatch, null, true); + vectorForward(outputBatch); outputBatch.reset(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSelectOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSelectOperator.java index 22d2f343b9..2f296c9654 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSelectOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSelectOperator.java @@ -136,7 +136,7 @@ public void process(Object row, int tag) throws HiveException { // Just forward the row as is if (conf.isSelStarNoCompute()) { - forward(row, inputObjInspectors[tag], true); + vectorForward((VectorizedRowBatch) row); return; } @@ -155,7 +155,7 @@ public void process(Object row, int tag) throws HiveException { int originalProjectionSize = vrg.projectionSize; vrg.projectionSize = projectedOutputColumns.length; vrg.projectedColumns = this.projectedOutputColumns; - forward(vrg, outputObjInspector, true); + vectorForward((VectorizedRowBatch) row); // Revert the projected columns back, because vrg will be re-used. 
vrg.projectionSize = originalProjectionSize; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorTopNKeyOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorTopNKeyOperator.java index e28afee5c8..a211308c28 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorTopNKeyOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorTopNKeyOperator.java @@ -128,7 +128,7 @@ public void process(Object data, int tag) throws HiveException { // Forward the result if (size > 0) { - forward(batch, null, true); + vectorForward(batch); } // Restore the original selected vector diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinCommonOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinCommonOperator.java index c832cdbd05..573368829e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinCommonOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinCommonOperator.java @@ -21,7 +21,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import java.util.Map; +import java.util.Map.Entry; import org.apache.commons.lang.ArrayUtils; import org.slf4j.Logger; @@ -41,7 +41,6 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorColumnSourceMapping; import org.apache.hadoop.hive.ql.exec.vector.VectorCopyRow; import org.apache.hadoop.hive.ql.exec.vector.VectorDeserializeRow; -import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContextRegion; import org.apache.hadoop.hive.ql.exec.vector.VectorizationOperator; @@ -55,14 +54,17 @@ import org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.VectorMapJoinFastHashTableLoader; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.BaseWork; -import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.MapJoinDesc; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.VectorDesc; import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc; import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableImplementationType; +import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableKeyType; +import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableKind; +import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.VectorMapJoinVariation; import org.apache.hadoop.hive.ql.plan.VectorMapJoinInfo; import org.apache.hadoop.hive.ql.plan.api.OperatorType; +import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableDeserializeRead; import org.apache.hadoop.hive.serde2.lazybinary.fast.LazyBinaryDeserializeRead; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; @@ -124,6 +126,10 @@ protected void initLoggingPrefix(String className) { // a mixture of input big table columns and new scratch columns. protected VectorizationContext vOutContext; + protected VectorMapJoinVariation vectorMapJoinVariation; + protected HashTableKind hashTableKind; + protected HashTableKeyType hashTableKeyType; + // The output column projection of the vectorized row batch. And, the type infos of the output // columns. 
protected int[] outputProjection; @@ -149,28 +155,70 @@ protected void initLoggingPrefix(String className) { protected String[] bigTableValueColumnNames; protected TypeInfo[] bigTableValueTypeInfos; - // This is a mapping of which big table columns (input and key/value expressions) will be - // part of the big table portion of the join output result. - protected VectorColumnOutputMapping bigTableRetainedMapping; + /* + * NOTE: + * The Big Table key columns are from the key expressions. + * The Big Table value columns are from the getExpr(posBigTable) expressions. + * Any calculations needed for those will be scratch columns. + * + * The Small Table key and value output columns are scratch columns. + * + * Big Table Retain Column Map / TypeInfos: + * Any Big Table Batch columns that will be in the output result. + * 0, 1, or more Column Nums and TypeInfos + * + * Non Outer Small Table Key Mapping: + * For non-[FULL] OUTER MapJoin, when Big Table key columns are not retained for the output + * result but are needed for the Small Table output result, they are put in this mapping + * as they are required for copying rows to the overflow batch. + * + * Outer Small Table Key Mapping: + * For [FULL] OUTER MapJoin, the mapping for any Small Table key columns needed for the + * output result from the Big Table key columns. The Big Table keys cannot be projected since + * on NOMATCH there must be a physical column present to hold the non-match NULL. + * + * Full Outer Small Table Key Mapping: + * For FULL OUTER MapJoin, the mapping from any needed Small Table key columns to their area + * in the output result. + * + * For deserializing a FULL OUTER non-match Small Table key into the output result. + * Can be partial or empty if some or all Small Table key columns are not retained. + * + * Small Table Value Mapping: + * The mapping from Small Table value columns to their area in the output result. + * + * For deserializing Small Table values into the output result. + * + * It is the Small Table value index to output column numbers and TypeInfos. + * That is, a mapping of the LazyBinary field order to output batch scratch columns for the + * small table portion. + * Or, to use the output column nums for OUTER Small Table value NULLs. + * + */ + protected int[] bigTableRetainColumnMap; + protected TypeInfo[] bigTableRetainTypeInfos; + + protected int[] nonOuterSmallTableKeyColumnMap; + protected TypeInfo[] nonOuterSmallTableKeyTypeInfos; - // This is a mapping of which keys will be copied from the big table (input and key expressions) - // to the small table result portion of the output for outer join. - protected VectorColumnOutputMapping bigTableOuterKeyMapping; + protected VectorColumnOutputMapping outerSmallTableKeyMapping; - // This is a mapping of the values in the small table hash table that will be copied to the - // small table result portion of the output. That is, a mapping of the LazyBinary field order - // to output batch scratch columns for the small table portion. - protected VectorColumnSourceMapping smallTableMapping; + protected VectorColumnSourceMapping fullOuterSmallTableKeyMapping; + protected VectorColumnSourceMapping smallTableValueMapping; + + // The MapJoin output result projection for both the Big Table input batch and the overflow batch. protected VectorColumnSourceMapping projectionMapping; // These are the output columns for the small table and the outer small table keys.
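// ============================================================================
// Editor's sketch (illustrative, not part of the patch): a concrete reading
// of the NOTE above. For a hypothetical
//   SELECT b.a, b.str, s.key, s.val
//   FROM big b FULL OUTER JOIN small s ON b.a = s.key
// with the Big Table batch laid out as [0: a, 1: str] and scratch columns
// starting at 2, the arrays could look like this. All values are made up.
// ============================================================================
final class FullOuterMappingSketch {
  // b.a and b.str stay in their input columns.
  static final int[] BIG_TABLE_RETAIN_COLUMN_MAP = { 0, 1 };
  // s.key must occupy a physical scratch column (2): on NOMATCH a NULL has to
  // be written there, so it cannot simply be projected from Big Table column 0.
  static final int[] FULL_OUTER_SMALL_TABLE_KEY_OUTPUT = { 2 };
  // s.val deserializes from the LazyBinary value bytes into scratch column 3.
  static final int[] SMALL_TABLE_VALUE_OUTPUT = { 3 };
  // The MapJoin output projection over input and scratch columns.
  static final int[] OUTPUT_PROJECTION = { 0, 1, 2, 3 };
}
// ============================================================================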
- protected int[] smallTableOutputVectorColumns; - protected int[] bigTableOuterKeyOutputVectorColumns; + protected int[] outerSmallTableKeyColumnMap; + protected int[] smallTableValueColumnMap; // These are the columns in the big and small table that are ByteColumnVector columns. // We create data buffers for these columns so we can copy strings into those columns by value. protected int[] bigTableByteColumnVectorColumns; + protected int[] nonOuterSmallTableKeyByteColumnVectorColumns; + protected int[] outerSmallTableKeyByteColumnVectorColumns; protected int[] smallTableByteColumnVectorColumns; // The above members are initialized by the constructor and must not be @@ -186,13 +234,22 @@ protected void initLoggingPrefix(String className) { // portion of the join output. protected transient VectorCopyRow bigTableRetainedVectorCopy; + // This helper object deserializes BinarySortable format small table keys into columns of a row + // in a vectorized row batch. + protected int[] allSmallTableKeyColumnNums; + protected boolean[] allSmallTableKeyColumnIncluded; + protected transient VectorDeserializeRow smallTableKeyOuterVectorDeserializeRow; + + protected transient VectorCopyRow nonOuterSmallTableKeyVectorCopy; + + // UNDONE // A helper object that efficiently copies the big table key columns (input or key expressions) - // that appear in the small table portion of the join output for outer joins. - protected transient VectorCopyRow bigTableVectorCopyOuterKeys; + // that appear in the small table portion of the join output. + protected transient VectorCopyRow outerSmallTableKeyVectorCopy; // This helper object deserializes LazyBinary format small table values into columns of a row // in a vectorized row batch. - protected transient VectorDeserializeRow smallTableVectorDeserializeRow; + protected transient VectorDeserializeRow smallTableValueVectorDeserializeRow; // This a 2nd batch with the same "column schema" as the big table batch that can be used to // build join output results in. If we can create some join output results in the big table @@ -207,6 +264,9 @@ protected void initLoggingPrefix(String className) { // Whether the native vectorized map join operator has performed its common setup. protected transient boolean needCommonSetup; + // Whether the native vectorized map join operator has performed its first batch setup. + protected transient boolean needFirstBatchSetup; + // Whether the native vectorized map join operator has performed its // native vector map join hash table setup. protected transient boolean needHashTableSetup; @@ -214,6 +274,9 @@ protected void initLoggingPrefix(String className) { // The small table hash table for the native vectorized map join operator. protected transient VectorMapJoinHashTable vectorMapJoinHashTable; + protected transient long batchCounter; + protected transient long rowCounter; + /** Kryo ctor. */ protected VectorMapJoinCommonOperator() { super(); @@ -246,9 +309,9 @@ public VectorMapJoinCommonOperator(CompilationOpContext ctx, OperatorDesc conf, posSingleVectorMapJoinSmallTable = (order[0] == posBigTable ? 
order[1] : order[0]); isOuterJoin = !desc.getNoOuterJoin(); - Map> filterExpressions = desc.getFilters(); - bigTableFilterExpressions = vContext.getVectorExpressions(filterExpressions.get(posBigTable), - VectorExpressionDescriptor.Mode.FILTER); + vectorMapJoinVariation = this.vectorDesc.getVectorMapJoinVariation(); + hashTableKind = this.vectorDesc.getHashTableKind(); + hashTableKeyType = this.vectorDesc.getHashTableKeyType(); bigTableKeyColumnMap = vectorMapJoinInfo.getBigTableKeyColumnMap(); bigTableKeyColumnNames = vectorMapJoinInfo.getBigTableKeyColumnNames(); @@ -260,11 +323,19 @@ public VectorMapJoinCommonOperator(CompilationOpContext ctx, OperatorDesc conf, bigTableValueTypeInfos = vectorMapJoinInfo.getBigTableValueTypeInfos(); bigTableValueExpressions = vectorMapJoinInfo.getSlimmedBigTableValueExpressions(); - bigTableRetainedMapping = vectorMapJoinInfo.getBigTableRetainedMapping(); + bigTableFilterExpressions = vectorMapJoinInfo.getBigTableFilterExpressions(); + + bigTableRetainColumnMap = vectorMapJoinInfo.getBigTableRetainColumnMap(); + bigTableRetainTypeInfos = vectorMapJoinInfo.getBigTableRetainTypeInfos(); + + nonOuterSmallTableKeyColumnMap = vectorMapJoinInfo.getNonOuterSmallTableKeyColumnMap(); + nonOuterSmallTableKeyTypeInfos = vectorMapJoinInfo.getNonOuterSmallTableKeyTypeInfos(); + + outerSmallTableKeyMapping = vectorMapJoinInfo.getOuterSmallTableKeyMapping(); - bigTableOuterKeyMapping = vectorMapJoinInfo.getBigTableOuterKeyMapping(); + fullOuterSmallTableKeyMapping = vectorMapJoinInfo.getFullOuterSmallTableKeyMapping(); - smallTableMapping = vectorMapJoinInfo.getSmallTableMapping(); + smallTableValueMapping = vectorMapJoinInfo.getSmallTableValueMapping(); projectionMapping = vectorMapJoinInfo.getProjectionMapping(); @@ -273,47 +344,96 @@ public VectorMapJoinCommonOperator(CompilationOpContext ctx, OperatorDesc conf, protected void determineCommonInfo(boolean isOuter) throws HiveException { - bigTableOuterKeyOutputVectorColumns = bigTableOuterKeyMapping.getOutputColumns(); - smallTableOutputVectorColumns = smallTableMapping.getOutputColumns(); + outerSmallTableKeyColumnMap = outerSmallTableKeyMapping.getOutputColumns(); + + smallTableValueColumnMap = smallTableValueMapping.getOutputColumns(); // Which big table and small table columns are ByteColumnVector and need have their data buffer // to be manually reset for some join result processing? 
- bigTableByteColumnVectorColumns = getByteColumnVectorColumns(bigTableOuterKeyMapping); + bigTableByteColumnVectorColumns = + getByteColumnVectorColumns(bigTableRetainColumnMap, bigTableRetainTypeInfos); + + nonOuterSmallTableKeyByteColumnVectorColumns = + getByteColumnVectorColumns(nonOuterSmallTableKeyColumnMap, nonOuterSmallTableKeyTypeInfos); + + outerSmallTableKeyByteColumnVectorColumns = + getByteColumnVectorColumns(outerSmallTableKeyMapping); - smallTableByteColumnVectorColumns = getByteColumnVectorColumns(smallTableMapping); + smallTableByteColumnVectorColumns = + getByteColumnVectorColumns(smallTableValueMapping); outputProjection = projectionMapping.getOutputColumns(); outputTypeInfos = projectionMapping.getTypeInfos(); - if (LOG.isDebugEnabled()) { + if (LOG.isInfoEnabled()) { int[] orderDisplayable = new int[order.length]; for (int i = 0; i < order.length; i++) { orderDisplayable[i] = (int) order[i]; } - LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator constructor order " + Arrays.toString(orderDisplayable)); - LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator constructor posBigTable " + (int) posBigTable); - LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator constructor posSingleVectorMapJoinSmallTable " + (int) posSingleVectorMapJoinSmallTable); - - LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator constructor bigTableKeyColumnMap " + Arrays.toString(bigTableKeyColumnMap)); - LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator constructor bigTableKeyColumnNames " + Arrays.toString(bigTableKeyColumnNames)); - LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator constructor bigTableKeyTypeInfos " + Arrays.toString(bigTableKeyTypeInfos)); - - LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator constructor bigTableValueColumnMap " + Arrays.toString(bigTableValueColumnMap)); - LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator constructor bigTableValueColumnNames " + Arrays.toString(bigTableValueColumnNames)); - LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator constructor bigTableValueTypeNames " + Arrays.toString(bigTableValueTypeInfos)); - - LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator constructor bigTableRetainedMapping " + bigTableRetainedMapping.toString()); - - LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator constructor bigTableOuterKeyMapping " + bigTableOuterKeyMapping.toString()); - - LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator constructor smallTableMapping " + smallTableMapping.toString()); - - LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator constructor bigTableByteColumnVectorColumns " + Arrays.toString(bigTableByteColumnVectorColumns)); - LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator constructor smallTableByteColumnVectorColumns " + Arrays.toString(smallTableByteColumnVectorColumns)); + LOG.info(getLoggingPrefix() + " order " + + Arrays.toString(orderDisplayable)); + LOG.info(getLoggingPrefix() + " posBigTable " + + (int) posBigTable); + LOG.info(getLoggingPrefix() + " posSingleVectorMapJoinSmallTable " + + (int) posSingleVectorMapJoinSmallTable); + + LOG.info(getLoggingPrefix() + " bigTableKeyColumnMap " + + Arrays.toString(bigTableKeyColumnMap)); + LOG.info(getLoggingPrefix() + " bigTableKeyColumnNames " + + Arrays.toString(bigTableKeyColumnNames)); + LOG.info(getLoggingPrefix() + " bigTableKeyTypeInfos " + + Arrays.toString(bigTableKeyTypeInfos)); + + LOG.info(getLoggingPrefix() + " 
bigTableValueColumnMap " + + Arrays.toString(bigTableValueColumnMap)); + LOG.info(getLoggingPrefix() + " bigTableValueColumnNames " + + Arrays.toString(bigTableValueColumnNames)); + LOG.info(getLoggingPrefix() + " bigTableValueTypeNames " + + Arrays.toString(bigTableValueTypeInfos)); + + LOG.info(getLoggingPrefix() + " getBigTableRetainColumnMap " + + Arrays.toString(bigTableRetainColumnMap)); + LOG.info(getLoggingPrefix() + " bigTableRetainTypeInfos " + + Arrays.toString(bigTableRetainTypeInfos)); + + LOG.info(getLoggingPrefix() + " nonOuterSmallTableKeyColumnMap " + + Arrays.toString(nonOuterSmallTableKeyColumnMap)); + LOG.info(getLoggingPrefix() + " nonOuterSmallTableKeyTypeInfos " + + Arrays.toString(nonOuterSmallTableKeyTypeInfos)); + + LOG.info(getLoggingPrefix() + " outerSmallTableKeyMapping " + + outerSmallTableKeyMapping.toString()); + + LOG.info(getLoggingPrefix() + " fullOuterSmallTableKeyMapping " + + fullOuterSmallTableKeyMapping.toString()); + + LOG.info(getLoggingPrefix() + " smallTableValueMapping " + + smallTableValueMapping.toString()); + + LOG.info(getLoggingPrefix() + " bigTableByteColumnVectorColumns " + + Arrays.toString(bigTableByteColumnVectorColumns)); + LOG.info(getLoggingPrefix() + " smallTableByteColumnVectorColumns " + + Arrays.toString(smallTableByteColumnVectorColumns)); + + LOG.info(getLoggingPrefix() + " outputProjection " + + Arrays.toString(outputProjection)); + LOG.info(getLoggingPrefix() + " outputTypeInfos " + + Arrays.toString(outputTypeInfos)); + + LOG.info(getLoggingPrefix() + " mapJoinDesc.getKeysString " + + conf.getKeysString()); + if (conf.getValueIndices() != null) { + for (Entry entry : conf.getValueIndices().entrySet()) { + LOG.info(getLoggingPrefix() + " mapJoinDesc.getValueIndices +" + + (int) entry.getKey() + " " + Arrays.toString(entry.getValue())); + } + } + LOG.info(getLoggingPrefix() + " mapJoinDesc.getExprs " + + conf.getExprs().toString()); + LOG.info(getLoggingPrefix() + " mapJoinDesc.getRetainList " + + conf.getRetainList().toString()); - LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator constructor outputProjection " + Arrays.toString(outputProjection)); - LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator constructor outputTypeInfos " + Arrays.toString(outputTypeInfos)); } setupVOutContext(conf.getOutputColumnNames()); @@ -323,11 +443,14 @@ protected void determineCommonInfo(boolean isOuter) throws HiveException { * Determine from a mapping which columns are BytesColumnVector columns. */ private int[] getByteColumnVectorColumns(VectorColumnMapping mapping) { + return getByteColumnVectorColumns(mapping.getOutputColumns(), mapping.getTypeInfos()); + } + + private int[] getByteColumnVectorColumns(int[] outputColumns, TypeInfo[] typeInfos) { + // Search mapping for any strings and return their output columns. 
ArrayList list = new ArrayList(); - int count = mapping.getCount(); - int[] outputColumns = mapping.getOutputColumns(); - TypeInfo[] typeInfos = mapping.getTypeInfos(); + final int count = outputColumns.length; for (int i = 0; i < count; i++) { int outputColumn = outputColumns[i]; String typeName = typeInfos[i].getTypeName(); @@ -345,10 +468,12 @@ protected void determineCommonInfo(boolean isOuter) throws HiveException { */ protected void setupVOutContext(List outputColumnNames) { if (LOG.isDebugEnabled()) { - LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator constructor outputColumnNames " + outputColumnNames); + LOG.debug(getLoggingPrefix() + " outputColumnNames " + outputColumnNames); } if (outputColumnNames.size() != outputProjection.length) { - throw new RuntimeException("Output column names " + outputColumnNames + " length and output projection " + Arrays.toString(outputProjection) + " / " + Arrays.toString(outputTypeInfos) + " length mismatch"); + throw new RuntimeException("Output column names " + outputColumnNames + + " length and output projection " + Arrays.toString(outputProjection) + + " / " + Arrays.toString(outputTypeInfos) + " length mismatch"); } vOutContext.resetProjectionColumns(); for (int i = 0; i < outputColumnNames.size(); ++i) { @@ -357,7 +482,8 @@ protected void setupVOutContext(List outputColumnNames) { vOutContext.addProjectionColumn(columnName, outputColumn); if (LOG.isDebugEnabled()) { - LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator constructor addProjectionColumn " + i + " columnName " + columnName + " outputColumn " + outputColumn); + LOG.debug(getLoggingPrefix() + " addProjectionColumn " + i + " columnName " + columnName + + " outputColumn " + outputColumn); } } } @@ -386,9 +512,50 @@ protected HashTableLoader getHashTableLoader(Configuration hconf) { return hashTableLoader; } + /* + * Do FULL OUTER MapJoin operator initialization. + */ + private void initializeFullOuterObjects() throws HiveException { + + // The Small Table key type info is the same as Big Table's. + TypeInfo[] smallTableKeyTypeInfos = bigTableKeyTypeInfos; + final int allKeysSize = smallTableKeyTypeInfos.length; + + /* + * The VectorMapJoinFullOuter{Long|MultiKey|String}Operator outputs 0, 1, or more + * Small Key columns in the join result.
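// ============================================================================
// Editor's sketch (illustrative, not part of the patch): the contract of the
// VectorDeserializeRow.init(int[], boolean[]) overload that
// initializeFullOuterObjects uses for MULTI_KEY keys. Entry i of the output
// column array is consulted only when the include flag is true; -1 marks a
// Small Table key column that is not retained in the join output. Values are
// made up.
// ============================================================================
final class KeyIncludeSketch {
  public static void main(String[] args) {
    int[] allSmallTableKeyColumnNums = { 7, -1, 8 };                  // key 1 dropped
    boolean[] allSmallTableKeyColumnIncluded = { true, false, true };
    // smallTableKeyOuterVectorDeserializeRow.init(
    //     allSmallTableKeyColumnNums, allSmallTableKeyColumnIncluded);
    // A non-matched Small Table key then lands in scratch columns 7 and 8 only.
    System.out.println(
        allSmallTableKeyColumnNums.length == allSmallTableKeyColumnIncluded.length); // true
  }
}
// ============================================================================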
+ */ + allSmallTableKeyColumnNums = new int[allKeysSize]; + Arrays.fill(allSmallTableKeyColumnNums, -1); + allSmallTableKeyColumnIncluded = new boolean[allKeysSize]; + + final int outputKeysSize = fullOuterSmallTableKeyMapping.getCount(); + int[] outputKeyNums = fullOuterSmallTableKeyMapping.getInputColumns(); + int[] outputKeyOutputColumns = fullOuterSmallTableKeyMapping.getOutputColumns(); + for (int i = 0; i < outputKeysSize; i++) { + final int outputKeyNum = outputKeyNums[i]; + allSmallTableKeyColumnNums[outputKeyNum] = outputKeyOutputColumns[i]; + allSmallTableKeyColumnIncluded[outputKeyNum] = true; + } + + if (hashTableKeyType == HashTableKeyType.MULTI_KEY && + outputKeysSize > 0) { + + smallTableKeyOuterVectorDeserializeRow = + new VectorDeserializeRow( + new BinarySortableDeserializeRead( + smallTableKeyTypeInfos, + /* useExternalBuffer */ true)); + smallTableKeyOuterVectorDeserializeRow.init( + allSmallTableKeyColumnNums, allSmallTableKeyColumnIncluded); + } + } + @Override protected void initializeOp(Configuration hconf) throws HiveException { + super.initializeOp(hconf); + VectorExpression.doTransientInit(bigTableFilterExpressions); VectorExpression.doTransientInit(bigTableKeyExpressions); VectorExpression.doTransientInit(bigTableValueExpressions); @@ -405,23 +572,34 @@ protected void initializeOp(Configuration hconf) throws HiveException { /* * Create our vectorized copy row and deserialize row helper objects. */ - if (smallTableMapping.getCount() > 0) { - smallTableVectorDeserializeRow = + if (vectorMapJoinVariation == VectorMapJoinVariation.FULL_OUTER) { + initializeFullOuterObjects(); + } + + if (smallTableValueMapping.getCount() > 0) { + smallTableValueVectorDeserializeRow = new VectorDeserializeRow( new LazyBinaryDeserializeRead( - smallTableMapping.getTypeInfos(), + smallTableValueMapping.getTypeInfos(), /* useExternalBuffer */ true)); - smallTableVectorDeserializeRow.init(smallTableMapping.getOutputColumns()); + smallTableValueVectorDeserializeRow.init(smallTableValueMapping.getOutputColumns()); } - if (bigTableRetainedMapping.getCount() > 0) { + if (bigTableRetainColumnMap.length > 0) { bigTableRetainedVectorCopy = new VectorCopyRow(); - bigTableRetainedVectorCopy.init(bigTableRetainedMapping); + bigTableRetainedVectorCopy.init( + bigTableRetainColumnMap, bigTableRetainTypeInfos); } - if (bigTableOuterKeyMapping.getCount() > 0) { - bigTableVectorCopyOuterKeys = new VectorCopyRow(); - bigTableVectorCopyOuterKeys.init(bigTableOuterKeyMapping); + if (nonOuterSmallTableKeyColumnMap.length > 0) { + nonOuterSmallTableKeyVectorCopy = new VectorCopyRow(); + nonOuterSmallTableKeyVectorCopy.init( + nonOuterSmallTableKeyColumnMap, nonOuterSmallTableKeyTypeInfos); + } + + if (outerSmallTableKeyMapping.getCount() > 0) { + outerSmallTableKeyVectorCopy = new VectorCopyRow(); + outerSmallTableKeyVectorCopy.init(outerSmallTableKeyMapping); } /* @@ -430,6 +608,7 @@ protected void initializeOp(Configuration hconf) throws HiveException { overflowBatch = setupOverflowBatch(); needCommonSetup = true; + needFirstBatchSetup = true; needHashTableSetup = true; if (LOG.isDebugEnabled()) { @@ -553,29 +732,46 @@ private void allocateOverflowBatchColumnVector(VectorizedRowBatch overflowBatch, } /* - * Common one time setup by native vectorized map join operator's processOp. + * Common one time setup for Native Vector MapJoin operator. 
*/ - protected void commonSetup(VectorizedRowBatch batch) throws HiveException { + protected void commonSetup() throws HiveException { - if (LOG.isDebugEnabled()) { - LOG.debug("VectorMapJoinInnerCommonOperator commonSetup begin..."); - displayBatchColumns(batch, "batch"); - displayBatchColumns(overflowBatch, "overflowBatch"); + /* + * Make sure big table BytesColumnVectors have room for string values in the overflow batch... + */ + for (int column: bigTableByteColumnVectorColumns) { + BytesColumnVector bytesColumnVector = (BytesColumnVector) overflowBatch.cols[column]; + bytesColumnVector.initBuffer(); } - // Make sure big table BytesColumnVectors have room for string values in the overflow batch... - for (int column: bigTableByteColumnVectorColumns) { + for (int column : nonOuterSmallTableKeyByteColumnVectorColumns) { + BytesColumnVector bytesColumnVector = (BytesColumnVector) overflowBatch.cols[column]; + bytesColumnVector.initBuffer(); + } + + for (int column : outerSmallTableKeyByteColumnVectorColumns) { BytesColumnVector bytesColumnVector = (BytesColumnVector) overflowBatch.cols[column]; bytesColumnVector.initBuffer(); } + for (int column: smallTableByteColumnVectorColumns) { + BytesColumnVector bytesColumnVector = (BytesColumnVector) overflowBatch.cols[column]; + bytesColumnVector.initBuffer(); + } + + batchCounter = 0; + rowCounter = 0; + } + + /* + * Common one time setup performed on the native vectorized map join operator's first batch. + */ + public void firstBatchSetup(VectorizedRowBatch batch) throws HiveException { // Make sure small table BytesColumnVectors have room for string values in the big table and // overflow batches... for (int column: smallTableByteColumnVectorColumns) { BytesColumnVector bytesColumnVector = (BytesColumnVector) batch.cols[column]; bytesColumnVector.initBuffer(); - bytesColumnVector = (BytesColumnVector) overflowBatch.cols[column]; - bytesColumnVector.initBuffer(); } // Setup a scratch batch that will be used to play back big table rows that were spilled @@ -583,6 +779,67 @@ protected void commonSetup(VectorizedRowBatch batch) throws HiveException { spillReplayBatch = VectorizedBatchUtil.makeLike(batch); } + /* + * Perform any Native Vector MapJoin operator specific hash table setup. + */ + public void hashTableSetup() throws HiveException { + } + + /* + * Perform the Native Vector MapJoin operator work. + */ + public abstract void processBatch(VectorizedRowBatch batch) throws HiveException; + + /* + * Common process method for all Native Vector MapJoin operators. + * + * Do common initialization work and invoke the override-able common setup methods. + * + * Then, invoke the processBatch override method to do the operator work. + */ + @Override + public void process(Object row, int tag) throws HiveException { + + VectorizedRowBatch batch = (VectorizedRowBatch) row; + alias = (byte) tag; + + if (needCommonSetup) { + + // Our one time process method initialization. + commonSetup(); + + needCommonSetup = false; + } + + if (needFirstBatchSetup) { + + // Our one time first-batch method initialization. + firstBatchSetup(batch); + + needFirstBatchSetup = false; + } + + if (needHashTableSetup) { + + // Setup our hash table specialization. It will be the first time the process + // method is called, or after a Hybrid Grace reload.
+ + hashTableSetup(); + + needHashTableSetup = false; + } + + batchCounter++; + + if (batch.size == 0) { + return; + } + + rowCounter += batch.size; + + processBatch(batch); + } + protected void displayBatchColumns(VectorizedRowBatch batch, String batchName) { LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator commonSetup " + batchName + " column count " + batch.numCols); for (int column = 0; column < batch.numCols; column++) { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinFullOuterLongOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinFullOuterLongOperator.java new file mode 100644 index 0000000000..d08c2f09a3 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinFullOuterLongOperator.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.mapjoin; + +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; + +/** + * Specialized class for doing a Native Vector FULL OUTER MapJoin on a Single-Column Long + * using a hash map. + */ +public class VectorMapJoinFullOuterLongOperator extends VectorMapJoinOuterLongOperator { + private static final long serialVersionUID = 1L; + + //------------------------------------------------------------------------------------------------ + + private static final String CLASS_NAME = VectorMapJoinFullOuterLongOperator.class.getName(); + // private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME); + + protected String getLoggingPrefix() { + return super.getLoggingPrefix(CLASS_NAME); + } + + //--------------------------------------------------------------------------- + // Pass-thru constructors. + // + + /** Kryo ctor. */ + protected VectorMapJoinFullOuterLongOperator() { + super(); + } + + public VectorMapJoinFullOuterLongOperator(CompilationOpContext ctx) { + super(ctx); + } + + public VectorMapJoinFullOuterLongOperator(CompilationOpContext ctx, OperatorDesc conf, + VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException { + super(ctx, conf, vContext, vectorDesc); + } + + @Override + public void hashTableSetup() throws HiveException { + super.hashTableSetup(); + + // Turn on key matching. 
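
Taken together, these hunks replace per-subclass copies of process() with a single template in the common operator. A compact model of the control flow, as a hypothetical standalone class (the patch's version also handles alias, firstBatchSetup, and exception wrapping; VectorizedRowBatch is the Hive class used throughout):

    abstract class NativeVectorMapJoinTemplate {
      protected boolean needCommonSetup = true;
      protected boolean needHashTableSetup = true;
      protected long batchCounter;
      protected long rowCounter;

      protected void commonSetup() throws Exception { }    // one time, never re-armed
      protected void hashTableSetup() throws Exception { } // re-armed by reloadHashTable()

      protected abstract void processBatch(VectorizedRowBatch batch) throws Exception;

      public void process(VectorizedRowBatch batch) throws Exception {
        if (needCommonSetup) {
          commonSetup();
          needCommonSetup = false;
        }
        if (needHashTableSetup) {
          hashTableSetup();
          needHashTableSetup = false;
        }
        batchCounter++;
        if (batch.size == 0) {
          return;                 // empty batches are counted but not processed
        }
        rowCounter += batch.size;
        processBatch(batch);
      }
    }

This is why the specialized operators below can delete their duplicated empty-batch checks and flag handling: they now override only commonSetup(), hashTableSetup(), and processBatch().
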
+ fullOuterHashTableSetup(); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinFullOuterMultiKeyOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinFullOuterMultiKeyOperator.java new file mode 100644 index 0000000000..8d5a4c2e13 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinFullOuterMultiKeyOperator.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.mapjoin; + +// import org.slf4j.Logger; +// import org.slf4j.LoggerFactory; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; + +/** + * Specialized class for doing a Native Vector FULL OUTER MapJoin on a Multi-Key + * using a hash map. + */ +public class VectorMapJoinFullOuterMultiKeyOperator extends VectorMapJoinOuterMultiKeyOperator { + + private static final long serialVersionUID = 1L; + + //------------------------------------------------------------------------------------------------ + + private static final String CLASS_NAME = VectorMapJoinFullOuterMultiKeyOperator.class.getName(); + // private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME); + + protected String getLoggingPrefix() { + return super.getLoggingPrefix(CLASS_NAME); + } + + //--------------------------------------------------------------------------- + // Pass-thru constructors. + // + + /** Kryo ctor. */ + protected VectorMapJoinFullOuterMultiKeyOperator() { + super(); + } + + public VectorMapJoinFullOuterMultiKeyOperator(CompilationOpContext ctx) { + super(ctx); + } + + public VectorMapJoinFullOuterMultiKeyOperator(CompilationOpContext ctx, OperatorDesc conf, + VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException { + super(ctx, conf, vContext, vectorDesc); + } + + @Override + public void hashTableSetup() throws HiveException { + super.hashTableSetup(); + + // Turn on key matching. + fullOuterHashTableSetup(); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinFullOuterStringOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinFullOuterStringOperator.java new file mode 100644 index 0000000000..78987f0a18 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinFullOuterStringOperator.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements.
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.mapjoin; + +// import org.slf4j.Logger; +// import org.slf4j.LoggerFactory; +import org.apache.hadoop.hive.ql.CompilationOpContext; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import org.apache.hadoop.hive.ql.plan.VectorDesc; + +/** + * Specialized class for doing a Native Vector FULL OUTER MapJoin on a Single-Column String + * using a hash map. + */ +public class VectorMapJoinFullOuterStringOperator extends VectorMapJoinOuterStringOperator { + + private static final long serialVersionUID = 1L; + + //------------------------------------------------------------------------------------------------ + + private static final String CLASS_NAME = VectorMapJoinFullOuterStringOperator.class.getName(); + // private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME); + + protected String getLoggingPrefix() { + return super.getLoggingPrefix(CLASS_NAME); + } + + //--------------------------------------------------------------------------- + // Pass-thru constructors. + // + + /** Kryo ctor. */ + protected VectorMapJoinFullOuterStringOperator() { + super(); + } + + public VectorMapJoinFullOuterStringOperator(CompilationOpContext ctx) { + super(ctx); + } + + public VectorMapJoinFullOuterStringOperator(CompilationOpContext ctx, OperatorDesc conf, + VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException { + super(ctx, conf, vContext, vectorDesc); + } + + @Override + public void hashTableSetup() throws HiveException { + super.hashTableSetup(); + + // Turn on key matching. + fullOuterHashTableSetup(); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinGenerateResultOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinGenerateResultOperator.java index 3821cc6e48..f5bb547493 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinGenerateResultOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinGenerateResultOperator.java @@ -30,7 +30,6 @@ import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.persistence.HybridHashTableContainer; import org.apache.hadoop.hive.ql.exec.persistence.HybridHashTableContainer.HashPartition; -import org.apache.hadoop.hive.ql.exec.persistence.MapJoinBytesTableContainer; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorDeserializeRow; @@ -93,9 +92,6 @@ private transient Thread ownThread; private transient int interruptCheckCounter = CHECK_INTERRUPT_PER_OVERFLOW_BATCHES; - // Debug display.
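
All three new FULL OUTER operator files follow an identical pattern: Kryo and pass-thru constructors plus a single hashTableSetup() override. A hypothetical fourth specialization would differ only in its name and parent (sketch, not part of the patch; constructors elided):

    public class VectorMapJoinFullOuterDecimal64Operator      // hypothetical name
        extends VectorMapJoinOuterLongOperator {              // assuming it rides the long path

      @Override
      public void hashTableSetup() throws HiveException {
        super.hashTableSetup();
        fullOuterHashTableSetup();   // turn on key match tracking for non-match output
      }
    }
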
- protected transient long batchCounter; - /** Kryo ctor. */ protected VectorMapJoinGenerateResultOperator() { super(); @@ -124,13 +120,6 @@ private void setUpInterruptChecking() { ownThread = Thread.currentThread(); } - protected void commonSetup(VectorizedRowBatch batch) throws HiveException { - super.commonSetup(batch); - - batchCounter = 0; - - } - //------------------------------------------------------------------------------------------------ protected void performValueExpressions(VectorizedRowBatch batch, @@ -157,24 +146,24 @@ protected void performValueExpressions(VectorizedRowBatch batch, batch.selectedInUse = saveSelectedInUse; } - protected void doSmallTableDeserializeRow(VectorizedRowBatch batch, int batchIndex, + protected void doSmallTableValueDeserializeRow(VectorizedRowBatch batch, int batchIndex, ByteSegmentRef byteSegmentRef, VectorMapJoinHashMapResult hashMapResult) throws HiveException { byte[] bytes = byteSegmentRef.getBytes(); int offset = (int) byteSegmentRef.getOffset(); int length = byteSegmentRef.getLength(); - smallTableVectorDeserializeRow.setBytes(bytes, offset, length); + smallTableValueVectorDeserializeRow.setBytes(bytes, offset, length); try { // Our hash tables are immutable. We can safely do by reference STRING, CHAR/VARCHAR, etc. - smallTableVectorDeserializeRow.deserializeByRef(batch, batchIndex); + smallTableValueVectorDeserializeRow.deserializeByRef(batch, batchIndex); } catch (Exception e) { throw new HiveException( "\nHashMapResult detail: " + hashMapResult.getDetailedHashMapResultPositionString() + "\nDeserializeRead detail: " + - smallTableVectorDeserializeRow.getDetailedReadPositionString(), + smallTableValueVectorDeserializeRow.getDetailedReadPositionString(), e); } } @@ -215,22 +204,23 @@ protected int generateHashMapResultSingleValue(VectorizedRowBatch batch, for (int i = 0; i < duplicateCount; i++) { - int batchIndex = allMatchs[allMatchesIndex + i]; + final int batchIndex = allMatchs[allMatchesIndex + i]; - // Outer key copying is only used when we are using the input BigTable batch as the output. - // - if (bigTableVectorCopyOuterKeys != null) { - // Copy within row. - bigTableVectorCopyOuterKeys.copyByReference(batch, batchIndex, batch, batchIndex); + if (outerSmallTableKeyVectorCopy != null) { + + // For [FULL] OUTER MapJoin, copy Big Table keys to Small Table area within + // same batch by reference. + // + outerSmallTableKeyVectorCopy.copyByReference( + batch, batchIndex, + batch, batchIndex); } - if (smallTableVectorDeserializeRow != null) { - doSmallTableDeserializeRow(batch, batchIndex, + if (smallTableValueVectorDeserializeRow != null) { + doSmallTableValueDeserializeRow(batch, batchIndex, byteSegmentRef, hashMapResult); } - // VectorizedBatchUtil.debugDisplayOneRow(batch, batchIndex, "generateHashMapResultSingleValue big table"); - // Use the big table row as output. batch.selected[numSel++] = batchIndex; } @@ -273,26 +263,45 @@ protected void generateHashMapResultMultiValue(VectorizedRowBatch batch, for (int i = 0; i < duplicateCount; i++) { - int batchIndex = allMatchs[allMatchesIndex + i]; + final int batchIndex = allMatchs[allMatchesIndex + i]; ByteSegmentRef byteSegmentRef = hashMapResult.first(); while (byteSegmentRef != null) { // Copy the BigTable values into the overflow batch. Since the overflow batch may // not get flushed here, we must copy by value. - // Note this includes any outer join keys that need to go into the small table "area". 
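
The copy discipline these hunks enforce comes down to row lifetime; condensed, with the calls exactly as they appear above:

    // Same-batch Small Table key echo: source and destination rows live and die
    // together, so a by-reference copy is safe and cheap.
    outerSmallTableKeyVectorCopy.copyByReference(batch, batchIndex, batch, batchIndex);

    // Crossing into overflowBatch: the input batch may be recycled before the
    // overflow batch is flushed, so the values must be materialized.
    bigTableRetainedVectorCopy.copyByValue(batch, batchIndex, overflowBatch, overflowBatch.size);
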
+ // if (bigTableRetainedVectorCopy != null) { - bigTableRetainedVectorCopy.copyByValue(batch, batchIndex, - overflowBatch, overflowBatch.size); + bigTableRetainedVectorCopy.copyByValue( + batch, batchIndex, + overflowBatch, overflowBatch.size); } - if (smallTableVectorDeserializeRow != null) { + if (nonOuterSmallTableKeyVectorCopy != null) { - doSmallTableDeserializeRow(overflowBatch, overflowBatch.size, - byteSegmentRef, hashMapResult); + // For non-[FULL] OUTER MapJoin, copy non-retained Big Table keys to the Big Table area + // across to the overflow batch by value so the Small Key projection will see its keys... + // + nonOuterSmallTableKeyVectorCopy.copyByValue( + batch, batchIndex, + overflowBatch, overflowBatch.size); } - // VectorizedBatchUtil.debugDisplayOneRow(overflowBatch, overflowBatch.size, "generateHashMapResultMultiValue overflow"); + if (outerSmallTableKeyVectorCopy != null) { + + // For [FULL] OUTER MapJoin, copy Big Table keys to Small Table area across + // to overflow batch by value. + // + outerSmallTableKeyVectorCopy.copyByValue( + batch, batchIndex, + overflowBatch, overflowBatch.size); + } + + if (smallTableValueVectorDeserializeRow != null) { + + doSmallTableValueDeserializeRow(overflowBatch, overflowBatch.size, + byteSegmentRef, hashMapResult); + } overflowBatch.size++; if (overflowBatch.size == overflowBatch.DEFAULT_SIZE) { @@ -333,8 +342,8 @@ private void generateHashMapResultLargeMultiValue(VectorizedRowBatch batch, // Fill up as much of the overflow batch as possible with small table values. while (byteSegmentRef != null) { - if (smallTableVectorDeserializeRow != null) { - doSmallTableDeserializeRow(overflowBatch, overflowBatch.size, + if (smallTableValueVectorDeserializeRow != null) { + doSmallTableValueDeserializeRow(overflowBatch, overflowBatch.size, byteSegmentRef, hashMapResult); } @@ -361,9 +370,40 @@ private void generateHashMapResultLargeMultiValue(VectorizedRowBatch batch, int batchIndex = allMatchs[allMatchesIndex + i]; if (bigTableRetainedVectorCopy != null) { + // The one big table row's values repeat. - bigTableRetainedVectorCopy.copyByReference(batch, batchIndex, overflowBatch, 0); - for (int column : bigTableRetainedMapping.getOutputColumns()) { + bigTableRetainedVectorCopy.copyByReference( + batch, batchIndex, + overflowBatch, 0); + for (int column : bigTableRetainColumnMap) { + overflowBatch.cols[column].isRepeating = true; + } + } + + if (nonOuterSmallTableKeyVectorCopy != null) { + + // For non-[FULL] OUTER MapJoin, copy non-retained Big Table keys to the Big Table area + // across to the overflow batch by value so the Small Key projection will see its keys... + // + nonOuterSmallTableKeyVectorCopy.copyByValue( + batch, batchIndex, + overflowBatch, 0); + for (int column : nonOuterSmallTableKeyColumnMap) { + overflowBatch.cols[column].isRepeating = true; + } + } + + int[] outerSmallTableKeyColumnMap = null; + if (outerSmallTableKeyVectorCopy != null) { + + // For [FULL] OUTER MapJoin, copy Big Table keys to the Small Table area across + // to the overflow batch by value. + // + outerSmallTableKeyVectorCopy.copyByValue( + batch, batchIndex, + overflowBatch, 0); + outerSmallTableKeyColumnMap = outerSmallTableKeyMapping.getOutputColumns(); + for (int column : outerSmallTableKeyColumnMap) { overflowBatch.cols[column].isRepeating = true; } } @@ -373,10 +413,20 @@ private void generateHashMapResultLargeMultiValue(VectorizedRowBatch batch, forwardOverflowNoReset(); // Hand reset the big table columns.
- for (int column : bigTableRetainedMapping.getOutputColumns()) { + for (int column : bigTableRetainColumnMap) { + ColumnVector colVector = overflowBatch.cols[column]; + colVector.reset(); + } + for (int column : nonOuterSmallTableKeyColumnMap) { ColumnVector colVector = overflowBatch.cols[column]; colVector.reset(); } + if (outerSmallTableKeyColumnMap != null) { + for (int column : outerSmallTableKeyColumnMap) { + ColumnVector colVector = overflowBatch.cols[column]; + colVector.reset(); + } + } } byteSegmentRef = hashMapResult.next(); @@ -476,22 +526,16 @@ private void setupSpillSerDe(VectorizedRowBatch batch) throws HiveException { } private void spillSerializeRow(VectorizedRowBatch batch, int batchIndex, - VectorMapJoinHashTableResult hashTableResult) throws IOException { - - int partitionId = hashTableResult.spillPartitionId(); + int partitionId) throws IOException { HybridHashTableContainer ht = (HybridHashTableContainer) mapJoinTables[posSingleVectorMapJoinSmallTable]; HashPartition hp = ht.getHashPartitions()[partitionId]; VectorRowBytesContainer rowBytesContainer = hp.getMatchfileRowBytesContainer(); Output output = rowBytesContainer.getOuputForRowBytes(); -// int offset = output.getLength(); bigTableVectorSerializeRow.setOutputAppend(output); bigTableVectorSerializeRow.serializeWrite(batch, batchIndex); -// int length = output.getLength() - offset; rowBytesContainer.finishRow(); - -// LOG.debug("spillSerializeRow spilled batchIndex " + batchIndex + ", length " + length); } protected void spillHashMapBatch(VectorizedRowBatch batch, @@ -509,8 +553,18 @@ protected void spillHashMapBatch(VectorizedRowBatch batch, int hashTableResultIndex = spillHashTableResultIndices[i]; VectorMapJoinHashTableResult hashTableResult = hashTableResults[hashTableResultIndex]; - spillSerializeRow(batch, batchIndex, hashTableResult); + spillSerializeRow(batch, batchIndex, hashTableResult.spillPartitionId()); + } + } + + protected void spillRow(VectorizedRowBatch batch, int batchIndex, int partitionId) + throws HiveException, IOException { + + if (bigTableVectorSerializeRow == null) { + setupSpillSerDe(batch); } + + spillSerializeRow(batch, batchIndex, partitionId); } protected void spillBatchRepeated(VectorizedRowBatch batch, @@ -525,7 +579,7 @@ protected void spillBatchRepeated(VectorizedRowBatch batch, for (int logical = 0; logical < batch.size; logical++) { int batchIndex = (selectedInUse ? selected[logical] : logical); - spillSerializeRow(batch, batchIndex, hashTableResult); + spillSerializeRow(batch, batchIndex, hashTableResult.spillPartitionId()); } } @@ -541,8 +595,8 @@ protected void reloadHashTable(byte pos, int partitionId) MapJoinTableContainer smallTable = spilledMapJoinTables[pos]; - vectorMapJoinHashTable = VectorMapJoinOptimizedCreateHashTable.createHashTable(conf, - smallTable); + vectorMapJoinHashTable = + VectorMapJoinOptimizedCreateHashTable.createHashTable(conf, smallTable); needHashTableSetup = true; LOG.info("Created " + vectorMapJoinHashTable.getClass().getSimpleName() + " from " + this.getClass().getSimpleName()); @@ -637,7 +691,7 @@ public void forwardBigTableBatch(VectorizedRowBatch batch) throws HiveException batch.projectionSize = outputProjection.length; batch.projectedColumns = outputProjection; - forward(batch, null, true); + vectorForward(batch); // Revert the projected columns back, because batch can be re-used by our parent operators. 
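
The forward-with-projection swap in forwardBigTableBatch, condensed; the try/finally is an editorial hardening of the unconditional restore the patch performs (fields as named in the patch):

    int originalProjectionSize = batch.projectionSize;
    int[] originalProjectedColumns = batch.projectedColumns;
    batch.projectionSize = outputProjection.length;
    batch.projectedColumns = outputProjection;
    try {
      vectorForward(batch);                              // children see the join projection
    } finally {
      batch.projectionSize = originalProjectionSize;     // batch is re-used by parents
      batch.projectedColumns = originalProjectedColumns;
    }
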
batch.projectionSize = originalProjectionSize; @@ -649,7 +703,7 @@ public void forwardBigTableBatch(VectorizedRowBatch batch) throws HiveException * Forward the overflow batch and reset the batch. */ protected void forwardOverflow() throws HiveException { - forward(overflowBatch, null, true); + vectorForward(overflowBatch); overflowBatch.reset(); maybeCheckInterrupt(); } @@ -666,7 +720,7 @@ private void maybeCheckInterrupt() throws HiveException { * Forward the overflow batch, but do not reset the batch. */ private void forwardOverflowNoReset() throws HiveException { - forward(overflowBatch, null, true); + vectorForward(overflowBatch); } /* @@ -679,6 +733,11 @@ private void forwardOverflowNoReset() throws HiveException { @Override public void closeOp(boolean aborted) throws HiveException { super.closeOp(aborted); + + // NOTE: The closeOp call on super MapJoinOperator can trigger Hybrid Grace additional + // NOTE: processing and also FULL OUTER MapJoin non-match Small Table result generation. So, + // NOTE: we flush the overflowBatch after the call. + // if (!aborted && overflowBatch.size > 0) { forwardOverflow(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyGenerateResultOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyGenerateResultOperator.java index f791d951f8..35dddddb84 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyGenerateResultOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyGenerateResultOperator.java @@ -103,25 +103,25 @@ public VectorMapJoinInnerBigOnlyGenerateResultOperator(CompilationOpContext ctx, /* * Setup our inner big table only join specific members. */ - protected void commonSetup(VectorizedRowBatch batch) throws HiveException { - super.commonSetup(batch); + protected void commonSetup() throws HiveException { + super.commonSetup(); // Inner big-table only join specific. 
VectorMapJoinHashMultiSet baseHashMultiSet = (VectorMapJoinHashMultiSet) vectorMapJoinHashTable; - hashMultiSetResults = new VectorMapJoinHashMultiSetResult[batch.DEFAULT_SIZE]; + hashMultiSetResults = new VectorMapJoinHashMultiSetResult[VectorizedRowBatch.DEFAULT_SIZE]; for (int i = 0; i < hashMultiSetResults.length; i++) { hashMultiSetResults[i] = baseHashMultiSet.createHashMultiSetResult(); } - allMatchs = new int[batch.DEFAULT_SIZE]; + allMatchs = new int[VectorizedRowBatch.DEFAULT_SIZE]; - equalKeySeriesValueCounts = new long[batch.DEFAULT_SIZE]; - equalKeySeriesAllMatchIndices = new int[batch.DEFAULT_SIZE]; - equalKeySeriesDuplicateCounts = new int[batch.DEFAULT_SIZE]; + equalKeySeriesValueCounts = new long[VectorizedRowBatch.DEFAULT_SIZE]; + equalKeySeriesAllMatchIndices = new int[VectorizedRowBatch.DEFAULT_SIZE]; + equalKeySeriesDuplicateCounts = new int[VectorizedRowBatch.DEFAULT_SIZE]; - spills = new int[batch.DEFAULT_SIZE]; - spillHashMapResultIndices = new int[batch.DEFAULT_SIZE]; + spills = new int[VectorizedRowBatch.DEFAULT_SIZE]; + spillHashMapResultIndices = new int[VectorizedRowBatch.DEFAULT_SIZE]; } //----------------------------------------------------------------------------------------------- diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyLongOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyLongOperator.java index 678fa42678..30a19b8ce7 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyLongOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyLongOperator.java @@ -102,45 +102,36 @@ public VectorMapJoinInnerBigOnlyLongOperator(CompilationOpContext ctx, OperatorD // @Override - public void process(Object row, int tag) throws HiveException { + protected void commonSetup() throws HiveException { + super.commonSetup(); - try { - VectorizedRowBatch batch = (VectorizedRowBatch) row; - - alias = (byte) tag; - - if (needCommonSetup) { - // Our one time process method initialization. - commonSetup(batch); - - /* - * Initialize Single-Column Long members for this specialized class. - */ + /* + * Initialize Single-Column Long members for this specialized class. + */ - singleJoinColumn = bigTableKeyColumnMap[0]; - - needCommonSetup = false; - } - - if (needHashTableSetup) { - // Setup our hash table specialization. It will be the first time the process - // method is called, or after a Hybrid Grace reload. - - /* - * Get our Single-Column Long hash multi-set information for this specialized class. - */ + singleJoinColumn = bigTableKeyColumnMap[0]; + } - hashMultiSet = (VectorMapJoinLongHashMultiSet) vectorMapJoinHashTable; - useMinMax = hashMultiSet.useMinMax(); - if (useMinMax) { - min = hashMultiSet.min(); - max = hashMultiSet.max(); - } + @Override + public void hashTableSetup() throws HiveException { + super.hashTableSetup(); + + /* + * Get our Single-Column Long hash multi-set information for this specialized class. + */ + + hashMultiSet = (VectorMapJoinLongHashMultiSet) vectorMapJoinHashTable; + useMinMax = hashMultiSet.useMinMax(); + if (useMinMax) { + min = hashMultiSet.min(); + max = hashMultiSet.max(); + } + } - needHashTableSetup = false; - } + @Override + public void processBatch(VectorizedRowBatch batch) throws HiveException { - batchCounter++; + try { // Do the per-batch setup for an inner big-only join. 
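
The useMinMax/min/max fields fetched in hashTableSetup() enable a cheap range short-circuit before any hash probe. Per-row form of the idea (the real code applies it across whole batches; keyColumnVector is an assumed LongColumnVector, and the contains(long, result) probe mirrors the calls used by these operators):

    long key = keyColumnVector.vector[batchIndex];
    if (useMinMax && (key < min || key > max)) {
      // Key lies outside the range of every Small Table key: guaranteed no match,
      // so the row is handled as a non-match without touching the hash multi-set.
    } else {
      JoinUtil.JoinResult joinResult = hashMultiSet.contains(key, hashMultiSetResults[0]);
    }
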
@@ -153,11 +144,7 @@ public void process(Object row, int tag) throws HiveException { } final int inputLogicalSize = batch.size; - if (inputLogicalSize == 0) { - if (LOG.isDebugEnabled()) { - LOG.debug(CLASS_NAME + " batch #" + batchCounter + " empty"); - } return; } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyMultiKeyOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyMultiKeyOperator.java index 866aa60349..f587517b08 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyMultiKeyOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyMultiKeyOperator.java @@ -31,7 +31,6 @@ import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashTableResult; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.OperatorDesc; - import org.apache.hadoop.hive.ql.plan.VectorDesc; // Multi-Key hash table import. import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashMultiSet; @@ -109,45 +108,40 @@ public VectorMapJoinInnerBigOnlyMultiKeyOperator(CompilationOpContext ctx, Opera // @Override - public void process(Object row, int tag) throws HiveException { - - try { - VectorizedRowBatch batch = (VectorizedRowBatch) row; + protected void commonSetup() throws HiveException { + super.commonSetup(); - alias = (byte) tag; + /* + * Initialize Multi-Key members for this specialized class. + */ - if (needCommonSetup) { - // Our one time process method initialization. - commonSetup(batch); - - /* - * Initialize Multi-Key members for this specialized class. - */ + keyVectorSerializeWrite = new VectorSerializeRow( + new BinarySortableSerializeWrite(bigTableKeyColumnMap.length)); + keyVectorSerializeWrite.init(bigTableKeyTypeInfos, bigTableKeyColumnMap); - keyVectorSerializeWrite = new VectorSerializeRow( - new BinarySortableSerializeWrite(bigTableKeyColumnMap.length)); - keyVectorSerializeWrite.init(bigTableKeyTypeInfos, bigTableKeyColumnMap); + currentKeyOutput = new Output(); + saveKeyOutput = new Output(); + } - currentKeyOutput = new Output(); - saveKeyOutput = new Output(); + @Override + public void hashTableSetup() throws HiveException { + super.hashTableSetup(); - needCommonSetup = false; - } - if (needHashTableSetup) { - // Setup our hash table specialization. It will be the first time the process - // method is called, or after a Hybrid Grace reload. + /* + * Get our Multi-Key hash multi-set information for this specialized class. + */ - /* - * Get our Multi-Key hash multi-set information for this specialized class. - */ - - hashMultiSet = (VectorMapJoinBytesHashMultiSet) vectorMapJoinHashTable; + hashMultiSet = (VectorMapJoinBytesHashMultiSet) vectorMapJoinHashTable; + } - needHashTableSetup = false; - } + @Override + public void processBatch(VectorizedRowBatch batch) throws HiveException { - batchCounter++; + try { // Do the per-batch setup for an inner big-only join.
@@ -160,11 +154,7 @@ public void process(Object row, int tag) throws HiveException { } final int inputLogicalSize = batch.size; - if (inputLogicalSize == 0) { - if (LOG.isDebugEnabled()) { - LOG.debug(CLASS_NAME + " batch #" + batchCounter + " empty"); - } return; } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyStringOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyStringOperator.java index a0c3b9c155..e373db17b0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyStringOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerBigOnlyStringOperator.java @@ -31,7 +31,6 @@ import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashTableResult; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.OperatorDesc; - import org.apache.hadoop.hive.ql.plan.VectorDesc; // Single-Column String hash table import. import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashMultiSet; @@ -98,40 +97,31 @@ public VectorMapJoinInnerBigOnlyStringOperator(CompilationOpContext ctx, Operato // @Override - public void process(Object row, int tag) throws HiveException { - - try { - VectorizedRowBatch batch = (VectorizedRowBatch) row; - - alias = (byte) tag; - - if (needCommonSetup) { - // Our one time process method initialization. - commonSetup(batch); + protected void commonSetup() throws HiveException { + super.commonSetup(); - /* - * Initialize Single-Column String members for this specialized class. - */ - - singleJoinColumn = bigTableKeyColumnMap[0]; + /* + * Initialize Single-Column String members for this specialized class. + */ - needCommonSetup = false; - } + singleJoinColumn = bigTableKeyColumnMap[0]; + } - if (needHashTableSetup) { - // Setup our hash table specialization. It will be the first time the process - // method is called, or after a Hybrid Grace reload. + @Override + public void hashTableSetup() throws HiveException { + super.hashTableSetup(); - /* - * Get our Single-Column String hash multi-set information for this specialized class. - */ + /* + * Get our Single-Column String hash multi-set information for this specialized class. + */ - hashMultiSet = (VectorMapJoinBytesHashMultiSet) vectorMapJoinHashTable; + hashMultiSet = (VectorMapJoinBytesHashMultiSet) vectorMapJoinHashTable; + } - needHashTableSetup = false; - } + @Override + public void processBatch(VectorizedRowBatch batch) throws HiveException { - batchCounter++; + try { // Do the per-batch setup for an inner big-only join. 
@@ -144,11 +134,7 @@ public void process(Object row, int tag) throws HiveException { } final int inputLogicalSize = batch.size; - if (inputLogicalSize == 0) { - if (LOG.isDebugEnabled()) { - LOG.debug(CLASS_NAME + " batch #" + batchCounter + " empty"); - } return; } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerGenerateResultOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerGenerateResultOperator.java index ea2c04d34f..dc5d046364 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerGenerateResultOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerGenerateResultOperator.java @@ -108,26 +108,26 @@ public VectorMapJoinInnerGenerateResultOperator(CompilationOpContext ctx, Operat /* * Setup our inner join specific members. */ - protected void commonSetup(VectorizedRowBatch batch) throws HiveException { - super.commonSetup(batch); + protected void commonSetup() throws HiveException { + super.commonSetup(); // Inner join specific. VectorMapJoinHashMap baseHashMap = (VectorMapJoinHashMap) vectorMapJoinHashTable; - hashMapResults = new VectorMapJoinHashMapResult[batch.DEFAULT_SIZE]; + hashMapResults = new VectorMapJoinHashMapResult[VectorizedRowBatch.DEFAULT_SIZE]; for (int i = 0; i < hashMapResults.length; i++) { hashMapResults[i] = baseHashMap.createHashMapResult(); } - allMatchs = new int[batch.DEFAULT_SIZE]; + allMatchs = new int[VectorizedRowBatch.DEFAULT_SIZE]; - equalKeySeriesHashMapResultIndices = new int[batch.DEFAULT_SIZE]; - equalKeySeriesAllMatchIndices = new int[batch.DEFAULT_SIZE]; - equalKeySeriesIsSingleValue = new boolean[batch.DEFAULT_SIZE]; - equalKeySeriesDuplicateCounts = new int[batch.DEFAULT_SIZE]; + equalKeySeriesHashMapResultIndices = new int[VectorizedRowBatch.DEFAULT_SIZE]; + equalKeySeriesAllMatchIndices = new int[VectorizedRowBatch.DEFAULT_SIZE]; + equalKeySeriesIsSingleValue = new boolean[VectorizedRowBatch.DEFAULT_SIZE]; + equalKeySeriesDuplicateCounts = new int[VectorizedRowBatch.DEFAULT_SIZE]; - spills = new int[batch.DEFAULT_SIZE]; - spillHashMapResultIndices = new int[batch.DEFAULT_SIZE]; + spills = new int[VectorizedRowBatch.DEFAULT_SIZE]; + spillHashMapResultIndices = new int[VectorizedRowBatch.DEFAULT_SIZE]; } /* @@ -142,7 +142,7 @@ protected void innerPerBatchSetup(VectorizedRowBatch batch) { // For join operators that can generate small table results, reset their // (target) scratch columns. - for (int column : smallTableOutputVectorColumns) { + for (int column : smallTableValueColumnMap) { ColumnVector smallTableColumn = batch.cols[column]; smallTableColumn.reset(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerLongOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerLongOperator.java index 36404bcc60..5ac606ab01 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerLongOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerLongOperator.java @@ -30,7 +30,6 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.OperatorDesc; - import org.apache.hadoop.hive.ql.plan.VectorDesc; // Single-Column Long hash table import. 
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinLongHashMap; @@ -101,45 +100,36 @@ public VectorMapJoinInnerLongOperator(CompilationOpContext ctx, OperatorDesc con // @Override - public void process(Object row, int tag) throws HiveException { - - try { - VectorizedRowBatch batch = (VectorizedRowBatch) row; - - alias = (byte) tag; - - if (needCommonSetup) { - // Our one time process method initialization. - commonSetup(batch); - - /* - * Initialize Single-Column Long members for this specialized class. - */ - - singleJoinColumn = bigTableKeyColumnMap[0]; + protected void commonSetup() throws HiveException { + super.commonSetup(); - needCommonSetup = false; - } - - if (needHashTableSetup) { - // Setup our hash table specialization. It will be the first time the process - // method is called, or after a Hybrid Grace reload. + /* + * Initialize Single-Column Long members for this specialized class. + */ - /* - * Get our Single-Column Long hash map information for this specialized class. - */ + singleJoinColumn = bigTableKeyColumnMap[0]; + } - hashMap = (VectorMapJoinLongHashMap) vectorMapJoinHashTable; - useMinMax = hashMap.useMinMax(); - if (useMinMax) { - min = hashMap.min(); - max = hashMap.max(); - } + @Override + public void hashTableSetup() throws HiveException { + super.hashTableSetup(); + + /* + * Get our Single-Column Long hash map information for this specialized class. + */ + + hashMap = (VectorMapJoinLongHashMap) vectorMapJoinHashTable; + useMinMax = hashMap.useMinMax(); + if (useMinMax) { + min = hashMap.min(); + max = hashMap.max(); + } + } - needHashTableSetup = false; - } + @Override + public void processBatch(VectorizedRowBatch batch) throws HiveException { - batchCounter++; + try { // Do the per-batch setup for an inner join. @@ -151,11 +141,7 @@ public void process(Object row, int tag) throws HiveException { } final int inputLogicalSize = batch.size; - if (inputLogicalSize == 0) { - if (LOG.isDebugEnabled()) { - LOG.debug(CLASS_NAME + " batch #" + batchCounter + " empty"); - } return; } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerMultiKeyOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerMultiKeyOperator.java index 620101f7ff..cdee3fd957 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerMultiKeyOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerMultiKeyOperator.java @@ -30,7 +30,6 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.OperatorDesc; - import org.apache.hadoop.hive.ql.plan.VectorDesc; // Multi-Key hash table import. import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashMap; @@ -107,45 +106,36 @@ public VectorMapJoinInnerMultiKeyOperator(CompilationOpContext ctx, OperatorDesc // @Override - public void process(Object row, int tag) throws HiveException { - - try { - VectorizedRowBatch batch = (VectorizedRowBatch) row; - - alias = (byte) tag; - - if (needCommonSetup) { - // Our one time process method initialization. - commonSetup(batch); + protected void commonSetup() throws HiveException { + super.commonSetup(); - /* - * Initialize Multi-Key members for this specialized class. 
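
The Multi-Key specializations (here and in the InnerBigOnly and LeftSemi variants) serialize the Big Table key columns into a byte image before probing the bytes hash table. A condensed sketch of that mechanism, assuming only the members named in these hunks (BinarySortableSerializeWrite and Output are the serde classes the patch itself uses; the batch walk and null handling are elided):

    // One-time setup, as in commonSetup():
    keyVectorSerializeWrite = new VectorSerializeRow(
        new BinarySortableSerializeWrite(bigTableKeyColumnMap.length));
    keyVectorSerializeWrite.init(bigTableKeyTypeInfos, bigTableKeyColumnMap);

    // Per row: write the key columns into a reusable Output, then probe with the bytes.
    currentKeyOutput.reset();
    keyVectorSerializeWrite.setOutput(currentKeyOutput);
    keyVectorSerializeWrite.serializeWrite(batch, batchIndex);
    JoinUtil.JoinResult joinResult = hashMap.lookup(
        currentKeyOutput.getData(), 0, currentKeyOutput.getLength(), hashMapResults[0]);

The two Output objects (currentKeyOutput/saveKeyOutput) let the operator detect equal-key series by comparing consecutive serialized keys without re-probing.
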
- */ - - keyVectorSerializeWrite = new VectorSerializeRow( - new BinarySortableSerializeWrite(bigTableKeyColumnMap.length)); - keyVectorSerializeWrite.init(bigTableKeyTypeInfos, bigTableKeyColumnMap); + /* + * Initialize Multi-Key members for this specialized class. + */ - currentKeyOutput = new Output(); - saveKeyOutput = new Output(); + keyVectorSerializeWrite = new VectorSerializeRow( + new BinarySortableSerializeWrite(bigTableKeyColumnMap.length)); + keyVectorSerializeWrite.init(bigTableKeyTypeInfos, bigTableKeyColumnMap); - needCommonSetup = false; - } + currentKeyOutput = new Output(); + saveKeyOutput = new Output(); + } - if (needHashTableSetup) { - // Setup our hash table specialization. It will be the first time the process - // method is called, or after a Hybrid Grace reload. + @Override + public void hashTableSetup() throws HiveException { + super.hashTableSetup(); - /* - * Get our Multi-Key hash map information for this specialized class. - */ + /* + * Get our Multi-Key hash map information for this specialized class. + */ - hashMap = (VectorMapJoinBytesHashMap) vectorMapJoinHashTable; + hashMap = (VectorMapJoinBytesHashMap) vectorMapJoinHashTable; + } - needHashTableSetup = false; - } + @Override + public void processBatch(VectorizedRowBatch batch) throws HiveException { - batchCounter++; + try { // Do the per-batch setup for an inner join. @@ -157,11 +147,7 @@ public void process(Object row, int tag) throws HiveException { } final int inputLogicalSize = batch.size; - if (inputLogicalSize == 0) { - if (LOG.isDebugEnabled()) { - LOG.debug(CLASS_NAME + " batch #" + batchCounter + " empty"); - } return; } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerStringOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerStringOperator.java index d99d514ef4..8e6697ebb3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerStringOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinInnerStringOperator.java @@ -30,7 +30,6 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.OperatorDesc; - import org.apache.hadoop.hive.ql.plan.VectorDesc; // Single-Column String hash table import. import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashMap; @@ -97,40 +96,31 @@ public VectorMapJoinInnerStringOperator(CompilationOpContext ctx, OperatorDesc c // @Override - public void process(Object row, int tag) throws HiveException { - - try { - VectorizedRowBatch batch = (VectorizedRowBatch) row; - - alias = (byte) tag; - - if (needCommonSetup) { - // Our one time process method initialization. - commonSetup(batch); + protected void commonSetup() throws HiveException { + super.commonSetup(); - /* - * Initialize Single-Column String members for this specialized class. - */ - - singleJoinColumn = bigTableKeyColumnMap[0]; + /* + * Initialize Single-Column String members for this specialized class. + */ - needCommonSetup = false; - } + singleJoinColumn = bigTableKeyColumnMap[0]; + } - if (needHashTableSetup) { - // Setup our hash table specialization. It will be the first time the process - // method is called, or after a Hybrid Grace reload. + @Override + public void hashTableSetup() throws HiveException { + super.hashTableSetup(); - /* - * Get our Single-Column String hash map information for this specialized class. 
- */ + /* + * Get our Single-Column String hash map information for this specialized class. + */ - hashMap = (VectorMapJoinBytesHashMap) vectorMapJoinHashTable; + hashMap = (VectorMapJoinBytesHashMap) vectorMapJoinHashTable; + } - needHashTableSetup = false; - } + @Override + public void processBatch(VectorizedRowBatch batch) throws HiveException { - batchCounter++; + try { // Do the per-batch setup for an inner join. @@ -142,11 +132,7 @@ public void process(Object row, int tag) throws HiveException { } final int inputLogicalSize = batch.size; - if (inputLogicalSize == 0) { - if (LOG.isDebugEnabled()) { - LOG.debug(CLASS_NAME + " batch #" + batchCounter + " empty"); - } return; } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiGenerateResultOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiGenerateResultOperator.java index f68d4c4da0..71ec56bd64 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiGenerateResultOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiGenerateResultOperator.java @@ -89,21 +89,21 @@ public VectorMapJoinLeftSemiGenerateResultOperator(CompilationOpContext ctx, Ope /* * Setup our left semi join specific members. */ - protected void commonSetup(VectorizedRowBatch batch) throws HiveException { - super.commonSetup(batch); + protected void commonSetup() throws HiveException { + super.commonSetup(); // Semi join specific. VectorMapJoinHashSet baseHashSet = (VectorMapJoinHashSet) vectorMapJoinHashTable; - hashSetResults = new VectorMapJoinHashSetResult[batch.DEFAULT_SIZE]; + hashSetResults = new VectorMapJoinHashSetResult[VectorizedRowBatch.DEFAULT_SIZE]; for (int i = 0; i < hashSetResults.length; i++) { hashSetResults[i] = baseHashSet.createHashSetResult(); } - allMatchs = new int[batch.DEFAULT_SIZE]; + allMatchs = new int[VectorizedRowBatch.DEFAULT_SIZE]; - spills = new int[batch.DEFAULT_SIZE]; - spillHashMapResultIndices = new int[batch.DEFAULT_SIZE]; + spills = new int[VectorizedRowBatch.DEFAULT_SIZE]; + spillHashMapResultIndices = new int[VectorizedRowBatch.DEFAULT_SIZE]; } //----------------------------------------------------------------------------------------------- diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiLongOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiLongOperator.java index 4185c5b60b..40e7cfaf83 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiLongOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiLongOperator.java @@ -31,7 +31,6 @@ import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashTableResult; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.OperatorDesc; - import org.apache.hadoop.hive.ql.plan.VectorDesc; // Single-Column Long hash table import. import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinLongHashSet; @@ -102,45 +101,36 @@ public VectorMapJoinLeftSemiLongOperator(CompilationOpContext ctx, OperatorDesc // @Override - public void process(Object row, int tag) throws HiveException { - - try { - VectorizedRowBatch batch = (VectorizedRowBatch) row; - - alias = (byte) tag; - - if (needCommonSetup) { - // Our one time process method initialization. 
- commonSetup(batch); - - /* - * Initialize Single-Column Long members for this specialized class. - */ - - singleJoinColumn = bigTableKeyColumnMap[0]; + protected void commonSetup() throws HiveException { + super.commonSetup(); - needCommonSetup = false; - } - - if (needHashTableSetup) { - // Setup our hash table specialization. It will be the first time the process - // method is called, or after a Hybrid Grace reload. + /* + * Initialize Single-Column Long members for this specialized class. + */ - /* - * Get our Single-Column Long hash set information for this specialized class. - */ + singleJoinColumn = bigTableKeyColumnMap[0]; + } - hashSet = (VectorMapJoinLongHashSet) vectorMapJoinHashTable; - useMinMax = hashSet.useMinMax(); - if (useMinMax) { - min = hashSet.min(); - max = hashSet.max(); - } + @Override + public void hashTableSetup() throws HiveException { + super.hashTableSetup(); + + /* + * Get our Single-Column Long hash set information for this specialized class. + */ + + hashSet = (VectorMapJoinLongHashSet) vectorMapJoinHashTable; + useMinMax = hashSet.useMinMax(); + if (useMinMax) { + min = hashSet.min(); + max = hashSet.max(); + } + } - needHashTableSetup = false; - } + @Override + public void processBatch(VectorizedRowBatch batch) throws HiveException { - batchCounter++; + try { // Do the per-batch setup for an left semi join. @@ -153,11 +143,7 @@ public void process(Object row, int tag) throws HiveException { } final int inputLogicalSize = batch.size; - if (inputLogicalSize == 0) { - if (LOG.isDebugEnabled()) { - LOG.debug(CLASS_NAME + " batch #" + batchCounter + " empty"); - } return; } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiMultiKeyOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiMultiKeyOperator.java index 541e7fa01a..e5d9fdae19 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiMultiKeyOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiMultiKeyOperator.java @@ -31,7 +31,6 @@ import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashTableResult; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.OperatorDesc; - import org.apache.hadoop.hive.ql.plan.VectorDesc; // Multi-Key hash table import. import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashSet; @@ -108,45 +107,36 @@ public VectorMapJoinLeftSemiMultiKeyOperator(CompilationOpContext ctx, OperatorD // @Override - public void process(Object row, int tag) throws HiveException { - - try { - VectorizedRowBatch batch = (VectorizedRowBatch) row; - - alias = (byte) tag; - - if (needCommonSetup) { - // Our one time process method initialization. - commonSetup(batch); + protected void commonSetup() throws HiveException { + super.commonSetup(); - /* - * Initialize Multi-Key members for this specialized class. - */ - - keyVectorSerializeWrite = new VectorSerializeRow( - new BinarySortableSerializeWrite(bigTableKeyColumnMap.length)); - keyVectorSerializeWrite.init(bigTableKeyTypeInfos, bigTableKeyColumnMap); + /* + * Initialize Multi-Key members for this specialized class. 
+ */ - currentKeyOutput = new Output(); - saveKeyOutput = new Output(); + keyVectorSerializeWrite = new VectorSerializeRow( + new BinarySortableSerializeWrite(bigTableKeyColumnMap.length)); + keyVectorSerializeWrite.init(bigTableKeyTypeInfos, bigTableKeyColumnMap); - needCommonSetup = false; - } + currentKeyOutput = new Output(); + saveKeyOutput = new Output(); + } - if (needHashTableSetup) { - // Setup our hash table specialization. It will be the first time the process - // method is called, or after a Hybrid Grace reload. + @Override + public void hashTableSetup() throws HiveException { + super.hashTableSetup(); - /* - * Get our Multi-Key hash set information for this specialized class. - */ + /* + * Get our Multi-Key hash set information for this specialized class. + */ - hashSet = (VectorMapJoinBytesHashSet) vectorMapJoinHashTable; + hashSet = (VectorMapJoinBytesHashSet) vectorMapJoinHashTable; + } - needHashTableSetup = false; - } + @Override + public void processBatch(VectorizedRowBatch batch) throws HiveException { - batchCounter++; + try { // Do the per-batch setup for an left semi join. @@ -159,11 +149,7 @@ public void process(Object row, int tag) throws HiveException { } final int inputLogicalSize = batch.size; - if (inputLogicalSize == 0) { - if (LOG.isDebugEnabled()) { - LOG.debug(CLASS_NAME + " batch #" + batchCounter + " empty"); - } return; } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiStringOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiStringOperator.java index 6785bcef6d..df900a170c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiStringOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinLeftSemiStringOperator.java @@ -31,7 +31,6 @@ import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashTableResult; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.OperatorDesc; - import org.apache.hadoop.hive.ql.plan.VectorDesc; // Single-Column String hash table import. import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashSet; @@ -98,40 +97,31 @@ public VectorMapJoinLeftSemiStringOperator(CompilationOpContext ctx, OperatorDes // @Override - public void process(Object row, int tag) throws HiveException { - - try { - VectorizedRowBatch batch = (VectorizedRowBatch) row; - - alias = (byte) tag; - - if (needCommonSetup) { - // Our one time process method initialization. - commonSetup(batch); + protected void commonSetup() throws HiveException { + super.commonSetup(); - /* - * Initialize Single-Column String members for this specialized class. - */ - - singleJoinColumn = bigTableKeyColumnMap[0]; + /* + * Initialize Single-Column String members for this specialized class. + */ - needCommonSetup = false; - } + singleJoinColumn = bigTableKeyColumnMap[0]; + } - if (needHashTableSetup) { - // Setup our hash table specialization. It will be the first time the process - // method is called, or after a Hybrid Grace reload. + @Override + public void hashTableSetup() throws HiveException { + super.hashTableSetup(); - /* - * Get our Single-Column String hash set information for this specialized class. - */ + /* + * Get our Single-Column String hash set information for this specialized class. 
+ */ - hashSet = (VectorMapJoinBytesHashSet) vectorMapJoinHashTable; + hashSet = (VectorMapJoinBytesHashSet) vectorMapJoinHashTable; + } - needHashTableSetup = false; - } + @Override + public void processBatch(VectorizedRowBatch batch) throws HiveException { - batchCounter++; + try { // Do the per-batch setup for an left semi join. @@ -144,11 +134,7 @@ public void process(Object row, int tag) throws HiveException { } final int inputLogicalSize = batch.size; - if (inputLogicalSize == 0) { - if (LOG.isDebugEnabled()) { - LOG.debug(CLASS_NAME + " batch #" + batchCounter + " empty"); - } return; } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterGenerateResultOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterGenerateResultOperator.java index 2e5c5685bf..61bcbf075e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterGenerateResultOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterGenerateResultOperator.java @@ -24,13 +24,19 @@ import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.exec.JoinUtil; +import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer; +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashMap; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMap; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMapResult; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashTableResult; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinLongHashMap; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinNonMatchedIterator; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.VectorDesc; @@ -131,32 +137,34 @@ public VectorMapJoinOuterGenerateResultOperator(CompilationOpContext ctx, Operat /* * Setup our outer join specific members. */ - protected void commonSetup(VectorizedRowBatch batch) throws HiveException { - super.commonSetup(batch); + protected void commonSetup() throws HiveException { + super.commonSetup(); // Outer join specific. 
VectorMapJoinHashMap baseHashMap = (VectorMapJoinHashMap) vectorMapJoinHashTable; - hashMapResults = new VectorMapJoinHashMapResult[batch.DEFAULT_SIZE]; + hashMapResults = new VectorMapJoinHashMapResult[VectorizedRowBatch.DEFAULT_SIZE]; for (int i = 0; i < hashMapResults.length; i++) { hashMapResults[i] = baseHashMap.createHashMapResult(); } - inputSelected = new int[batch.DEFAULT_SIZE]; + inputSelected = new int[VectorizedRowBatch.DEFAULT_SIZE]; - allMatchs = new int[batch.DEFAULT_SIZE]; + allMatchs = new int[VectorizedRowBatch.DEFAULT_SIZE]; - equalKeySeriesHashMapResultIndices = new int[batch.DEFAULT_SIZE]; - equalKeySeriesAllMatchIndices = new int[batch.DEFAULT_SIZE]; - equalKeySeriesIsSingleValue = new boolean[batch.DEFAULT_SIZE]; - equalKeySeriesDuplicateCounts = new int[batch.DEFAULT_SIZE]; + equalKeySeriesHashMapResultIndices = new int[VectorizedRowBatch.DEFAULT_SIZE]; + equalKeySeriesAllMatchIndices = new int[VectorizedRowBatch.DEFAULT_SIZE]; + equalKeySeriesIsSingleValue = new boolean[VectorizedRowBatch.DEFAULT_SIZE]; + equalKeySeriesDuplicateCounts = new int[VectorizedRowBatch.DEFAULT_SIZE]; - spills = new int[batch.DEFAULT_SIZE]; - spillHashMapResultIndices = new int[batch.DEFAULT_SIZE]; + spills = new int[VectorizedRowBatch.DEFAULT_SIZE]; + spillHashMapResultIndices = new int[VectorizedRowBatch.DEFAULT_SIZE]; - nonSpills = new int[batch.DEFAULT_SIZE]; - noMatchs = new int[batch.DEFAULT_SIZE]; - merged = new int[batch.DEFAULT_SIZE]; + nonSpills = new int[VectorizedRowBatch.DEFAULT_SIZE]; + noMatchs = new int[VectorizedRowBatch.DEFAULT_SIZE]; + merged = new int[VectorizedRowBatch.DEFAULT_SIZE]; + + matchTracker = null; } @@ -174,15 +182,16 @@ protected void outerPerBatchSetup(VectorizedRowBatch batch) { // For join operators that can generate small table results, reset their // (target) scratch columns. - for (int column : smallTableOutputVectorColumns) { + for (int column : outerSmallTableKeyColumnMap) { + ColumnVector bigTableOuterKeyColumn = batch.cols[column]; + bigTableOuterKeyColumn.reset(); + } + + for (int column : smallTableValueColumnMap) { ColumnVector smallTableColumn = batch.cols[column]; smallTableColumn.reset(); } - for (int column : bigTableOuterKeyOutputVectorColumns) { - ColumnVector bigTableOuterKeyColumn = batch.cols[column]; - bigTableOuterKeyColumn.reset(); - } }
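
The commonSetup rewrite above sizes the operator's per-batch bookkeeping arrays from the VectorizedRowBatch.DEFAULT_SIZE constant rather than through a batch instance, and clears matchTracker so FULL OUTER match tracking starts fresh with each hash table. The equalKeySeries* arrays run-length encode consecutive equal keys seen during the probe loop so that a single hash map lookup serves every duplicate of a key. A minimal standalone sketch of that bookkeeping, with the driver and data invented purely for illustration:

    // Run-length bookkeeping over matched join keys in batch order, mirroring
    // the operator's equalKeySeries* fields (data and driver are invented).
    long[] matchedKeys = {3, 3, 7, 9, 9, 9};
    int[] allMatchs = new int[matchedKeys.length];
    int[] equalKeySeriesDuplicateCounts = new int[matchedKeys.length];
    int allMatchCount = 0;
    int equalKeySeriesCount = 0;
    for (int i = 0; i < matchedKeys.length; i++) {
      if (i == 0 || matchedKeys[i] != matchedKeys[i - 1]) {
        // New series of equal keys: one hash table lookup would happen here.
        equalKeySeriesDuplicateCounts[equalKeySeriesCount++] = 1;
      } else {
        // Key continues: reuse the previous lookup's result.
        equalKeySeriesDuplicateCounts[equalKeySeriesCount - 1]++;
      }
      allMatchs[allMatchCount++] = i;
    }
    // Yields 3 series with duplicate counts {2, 1, 3}.
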
/** @@ -569,27 +578,28 @@ public void finishOuter(VectorizedRowBatch batch, protected void generateOuterNulls(VectorizedRowBatch batch, int[] noMatchs, int noMatchSize) throws IOException, HiveException { - // Set null information in the small table results area. + // Set null information in the small table results area. - for (int i = 0; i < noMatchSize; i++) { - int batchIndex = noMatchs[i]; + for (int i = 0; i < noMatchSize; i++) { + int batchIndex = noMatchs[i]; - // Mark any scratch small table scratch columns that would normally receive a copy of the - // key as null, too. - for (int column : bigTableOuterKeyOutputVectorColumns) { - ColumnVector colVector = batch.cols[column]; - colVector.noNulls = false; - colVector.isNull[batchIndex] = true; - } + // Mark any small table scratch columns that would normally receive a copy of the + // key as null, too. + // + for (int column : outerSmallTableKeyColumnMap) { + ColumnVector colVector = batch.cols[column]; + colVector.noNulls = false; + colVector.isNull[batchIndex] = true; + } - // Small table values are set to null. - for (int column : smallTableOutputVectorColumns) { - ColumnVector colVector = batch.cols[column]; - colVector.noNulls = false; - colVector.isNull[batchIndex] = true; - } - } - } + // Small table values are set to null. + for (int column : smallTableValueColumnMap) { + ColumnVector colVector = batch.cols[column]; + colVector.noNulls = false; + colVector.isNull[batchIndex] = true; + } + } + } /** * Generate the outer join output results for one vectorized row batch with a repeated key. @@ -734,20 +744,310 @@ public void finishOuterRepeated(VectorizedRowBatch batch, JoinUtil.JoinResult jo */ protected void generateOuterNullsRepeatedAll(VectorizedRowBatch batch) throws HiveException { - for (int column : smallTableOutputVectorColumns) { + // Mark any small table scratch columns that would normally receive a copy of the + // key as null, too. + // + for (int column : outerSmallTableKeyColumnMap) { ColumnVector colVector = batch.cols[column]; colVector.noNulls = false; colVector.isNull[0] = true; colVector.isRepeating = true; } - // Mark any scratch small table scratch columns that would normally receive a copy of the key - // as null, too. - for (int column : bigTableOuterKeyOutputVectorColumns) { + for (int column : smallTableValueColumnMap) { ColumnVector colVector = batch.cols[column]; colVector.noNulls = false; colVector.isNull[0] = true; colVector.isRepeating = true; } } + + private void markBigTableColumnsAsNullRepeating() { + + /* + * For non-match FULL OUTER Small Table results, the Big Table columns are all NULL. + */ + for (int column : bigTableRetainColumnMap) { + ColumnVector colVector = overflowBatch.cols[column]; + colVector.isRepeating = true; + colVector.noNulls = false; + colVector.isNull[0] = true; + } + }
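
markBigTableColumnsAsNullRepeating() leans on the vectorized repeating-column convention: flagging a column as repeating with row 0 NULL marks every row NULL in constant time, which is why the non-matched Small Table results never have to touch the Big Table side row by row. A short sketch of that convention (the standalone usage is hypothetical, but the three flags are the real ColumnVector fields):

    import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;

    LongColumnVector col = new LongColumnVector();  // VectorizedRowBatch.DEFAULT_SIZE entries
    col.isRepeating = true;   // row 0 stands in for every row
    col.noNulls = false;      // the column may contain NULLs
    col.isNull[0] = true;     // ...and row 0 is NULL, so every row reads as NULL
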
+ + /* + * For FULL OUTER MapJoin, find the non-matched Small Table keys and values and add them to the + * join output result. + */ + @Override + protected void generateFullOuterSmallTableNoMatches(byte smallTablePos, + MapJoinTableContainer substituteSmallTable) throws HiveException { + + /* + * For dynamic partition hash join, both the Big Table and Small Table are partitioned (sent) + * to the Reducer using the key hash code. So, we can generate the non-match Small Table + * results locally. + * + * Scan the Small Table for keys that didn't match and generate the non-matches into the + * overflowBatch. + */ + + /* + * If there were no matched keys sent, we need to do our common initialization. + */ + if (needCommonSetup) { + + // Our one time process method initialization. + commonSetup(); + + needCommonSetup = false; + } + + if (needHashTableSetup) { + + // Setup our hash table specialization. It will be the first time the process + // method is called, or after a Hybrid Grace reload. + + hashTableSetup(); + + needHashTableSetup = false; + } + + /* + * To support fancy NULL repeating columns, let's flush the overflowBatch if it has anything. + */ + if (overflowBatch.size > 0) { + forwardOverflow(); + } + markBigTableColumnsAsNullRepeating(); + + switch (hashTableKeyType) { + case BOOLEAN: + case BYTE: + case SHORT: + case INT: + case LONG: + generateFullOuterLongKeySmallTableNoMatches(); + break; + case STRING: + generateFullOuterStringKeySmallTableNoMatches(); + break; + case MULTI_KEY: + generateFullOuterMultiKeySmallTableNoMatches(); + break; + default: + throw new RuntimeException("Unexpected hash table key type " + hashTableKeyType); + } + } + + /* + * For FULL OUTER MapJoin, find the non-matched Small Table Long keys and values and add them to + * the join output result. + */ + protected void generateFullOuterLongKeySmallTableNoMatches() + throws HiveException { + + final LongColumnVector singleSmallTableKeyOutputColumnVector; + if (allSmallTableKeyColumnIncluded[0]) { + singleSmallTableKeyOutputColumnVector = + (LongColumnVector) overflowBatch.cols[allSmallTableKeyColumnNums[0]]; + } else { + singleSmallTableKeyOutputColumnVector = null; + } + + VectorMapJoinLongHashMap hashMap = (VectorMapJoinLongHashMap) vectorMapJoinHashTable; + + VectorMapJoinNonMatchedIterator nonMatchedIterator = + hashMap.createNonMatchedIterator(matchTracker); + nonMatchedIterator.init(); + while (nonMatchedIterator.findNextNonMatched()) { + + final long longKey; + boolean isKeyNull = !nonMatchedIterator.readNonMatchedLongKey(); + if (!isKeyNull) { + longKey = nonMatchedIterator.getNonMatchedLongKey(); + } else { + longKey = 0; + } + + VectorMapJoinHashMapResult hashMapResult = nonMatchedIterator.getNonMatchedHashMapResult(); + + ByteSegmentRef byteSegmentRef = hashMapResult.first(); + while (byteSegmentRef != null) { + + // NOTE: Big Table result columns were marked repeating NULL already. + + if (singleSmallTableKeyOutputColumnVector != null) { + if (isKeyNull) { + singleSmallTableKeyOutputColumnVector.isNull[overflowBatch.size] = true; + singleSmallTableKeyOutputColumnVector.noNulls = false; + } else { + singleSmallTableKeyOutputColumnVector.vector[overflowBatch.size] = longKey; + singleSmallTableKeyOutputColumnVector.isNull[overflowBatch.size] = false; + } + } + + if (smallTableValueVectorDeserializeRow != null) { + + doSmallTableValueDeserializeRow(overflowBatch, overflowBatch.size, + byteSegmentRef, hashMapResult); + } + + overflowBatch.size++; + if (overflowBatch.size == overflowBatch.DEFAULT_SIZE) { + forwardOverflow(); + markBigTableColumnsAsNullRepeating(); + } + byteSegmentRef = hashMapResult.next(); + } + } + } + + private void doSmallTableKeyDeserializeRow(VectorizedRowBatch batch, int batchIndex, + byte[] keyBytes, int keyOffset, int keyLength) + throws HiveException { + + smallTableKeyOuterVectorDeserializeRow.setBytes(keyBytes, keyOffset, keyLength); + + try { + // Our hash tables are immutable. We can safely do by reference STRING, CHAR/VARCHAR, etc. + smallTableKeyOuterVectorDeserializeRow.deserializeByRef(batch, batchIndex); + } catch (Exception e) { + throw new HiveException( + "\nDeserializeRead detail: " + + smallTableKeyOuterVectorDeserializeRow.getDetailedReadPositionString(), + e); + } + }
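
All three generateFullOuter*SmallTableNoMatches variants share one emit loop: append a non-matched Small Table row to overflowBatch, and whenever the batch fills, forward it and re-mark the Big Table columns, because forwarding resets the column flags. Schematically (forwardOverflow() and markBigTableColumnsAsNullRepeating() are the operator's own methods above; the two write* helpers are stand-ins for the per-key-type code):

    while (nonMatchedIterator.findNextNonMatched()) {
      writeSmallTableKey(overflowBatch, overflowBatch.size);     // stand-in
      writeSmallTableValues(overflowBatch, overflowBatch.size);  // stand-in
      overflowBatch.size++;
      if (overflowBatch.size == VectorizedRowBatch.DEFAULT_SIZE) {
        forwardOverflow();                     // sends and resets the batch
        markBigTableColumnsAsNullRepeating();  // the reset cleared the repeating-NULL marks
      }
    }
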
+ + /* + * For FULL OUTER MapJoin, find the non-matched Small Table Multi-Keys and values and add them + * to the join output result. + */ + protected void generateFullOuterMultiKeySmallTableNoMatches() throws HiveException { + + VectorMapJoinBytesHashMap hashMap = (VectorMapJoinBytesHashMap) vectorMapJoinHashTable; + + VectorMapJoinNonMatchedIterator nonMatchedIterator = + hashMap.createNonMatchedIterator(matchTracker); + nonMatchedIterator.init(); + while (nonMatchedIterator.findNextNonMatched()) { + + nonMatchedIterator.readNonMatchedBytesKey(); + byte[] keyBytes = nonMatchedIterator.getNonMatchedBytes(); + final int keyOffset = nonMatchedIterator.getNonMatchedBytesOffset(); + final int keyLength = nonMatchedIterator.getNonMatchedBytesLength(); + + VectorMapJoinHashMapResult hashMapResult = nonMatchedIterator.getNonMatchedHashMapResult(); + + ByteSegmentRef byteSegmentRef = hashMapResult.first(); + while (byteSegmentRef != null) { + + // NOTE: Big Table result columns were marked repeating NULL already. + + if (smallTableKeyOuterVectorDeserializeRow != null) { + doSmallTableKeyDeserializeRow(overflowBatch, overflowBatch.size, + keyBytes, keyOffset, keyLength); + } + + if (smallTableValueVectorDeserializeRow != null) { + + doSmallTableValueDeserializeRow(overflowBatch, overflowBatch.size, + byteSegmentRef, hashMapResult); + } + + overflowBatch.size++; + if (overflowBatch.size == overflowBatch.DEFAULT_SIZE) { + forwardOverflow(); + markBigTableColumnsAsNullRepeating(); + } + byteSegmentRef = hashMapResult.next(); + } + } + + // NOTE: We don't have to deal with FULL OUTER All-NULL key values like we do for single-column + // LONG and STRING because we do store them in the hash map... + } + + /* + * For FULL OUTER MapJoin, find the non-matched Small Table String keys and values and add them + * to the join output result. + */ + protected void generateFullOuterStringKeySmallTableNoMatches() throws HiveException { + + final BytesColumnVector singleSmallTableKeyOutputColumnVector; + if (allSmallTableKeyColumnIncluded[0]) { + singleSmallTableKeyOutputColumnVector = + (BytesColumnVector) overflowBatch.cols[allSmallTableKeyColumnNums[0]]; + } else { + singleSmallTableKeyOutputColumnVector = null; + } + + VectorMapJoinBytesHashMap hashMap = (VectorMapJoinBytesHashMap) vectorMapJoinHashTable; + + VectorMapJoinNonMatchedIterator nonMatchedIterator = + hashMap.createNonMatchedIterator(matchTracker); + nonMatchedIterator.init(); + while (nonMatchedIterator.findNextNonMatched()) { + + final byte[] keyBytes; + final int keyOffset; + final int keyLength; + boolean isKeyNull = !nonMatchedIterator.readNonMatchedBytesKey(); + if (!isKeyNull) { + keyBytes = nonMatchedIterator.getNonMatchedBytes(); + keyOffset = nonMatchedIterator.getNonMatchedBytesOffset(); + keyLength = nonMatchedIterator.getNonMatchedBytesLength(); + } else { + keyBytes = null; + keyOffset = 0; + keyLength = 0; + } + + VectorMapJoinHashMapResult hashMapResult = nonMatchedIterator.getNonMatchedHashMapResult(); + + ByteSegmentRef byteSegmentRef = hashMapResult.first(); + while (byteSegmentRef != null) { + + // NOTE: Big Table result columns were marked repeating NULL already.
+ + if (singleSmallTableKeyOutputColumnVector != null) { + if (isKeyNull) { + singleSmallTableKeyOutputColumnVector.isNull[overflowBatch.size] = true; + singleSmallTableKeyOutputColumnVector.noNulls = false; + } else { + singleSmallTableKeyOutputColumnVector.setVal( + overflowBatch.size, + keyBytes, keyOffset, keyLength); + singleSmallTableKeyOutputColumnVector.isNull[overflowBatch.size] = false; + } + } + + if (smallTableValueVectorDeserializeRow != null) { + + doSmallTableValueDeserializeRow(overflowBatch, overflowBatch.size, + byteSegmentRef, hashMapResult); + } + + overflowBatch.size++; + if (overflowBatch.size == overflowBatch.DEFAULT_SIZE) { + forwardOverflow(); + markBigTableColumnsAsNullRepeating(); + } + byteSegmentRef = hashMapResult.next(); + } + } + } + + protected void fullOuterHashTableSetup() { + + // Always track key matches for FULL OUTER. + matchTracker = vectorMapJoinHashTable.createMatchTracker(); + + } + + protected void fullOuterIntersectHashTableSetup() { + + matchTracker = vectorMapJoinHashTable.createMatchTracker(); + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterLongOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterLongOperator.java index be05cc2496..f3f5a36df0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterLongOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterLongOperator.java @@ -32,7 +32,6 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.OperatorDesc; - import org.apache.hadoop.hive.ql.plan.VectorDesc; // Single-Column Long hash table import. import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinLongHashMap; @@ -65,7 +64,7 @@ protected String getLoggingPrefix() { //--------------------------------------------------------------------------- // The hash map for this specialized class. - private transient VectorMapJoinLongHashMap hashMap; + protected transient VectorMapJoinLongHashMap hashMap; //--------------------------------------------------------------------------- // Single-Column Long specific members. @@ -77,7 +76,7 @@ protected String getLoggingPrefix() { private transient long max; // The column number for this one column join specialization. - private transient int singleJoinColumn; + protected transient int singleJoinColumn; //--------------------------------------------------------------------------- // Pass-thru constructors. @@ -102,55 +101,39 @@ public VectorMapJoinOuterLongOperator(CompilationOpContext ctx, OperatorDesc con // @Override - public void process(Object row, int tag) throws HiveException { - - try { - VectorizedRowBatch batch = (VectorizedRowBatch) row; - - alias = (byte) tag; - - if (needCommonSetup) { - // Our one time process method initialization. - commonSetup(batch); + protected void commonSetup() throws HiveException { + super.commonSetup(); - /* - * Initialize Single-Column Long members for this specialized class. - */ - - singleJoinColumn = bigTableKeyColumnMap[0]; - - needCommonSetup = false; - } + /* + * Initialize Single-Column Long members for this specialized class. + */ - if (needHashTableSetup) { - // Setup our hash table specialization. It will be the first time the process - // method is called, or after a Hybrid Grace reload. 
- - /* - * Get our Single-Column Long hash map information for this specialized class. - */ + singleJoinColumn = bigTableKeyColumnMap[0]; + } - hashMap = (VectorMapJoinLongHashMap) vectorMapJoinHashTable; - useMinMax = hashMap.useMinMax(); - if (useMinMax) { - min = hashMap.min(); - max = hashMap.max(); - } + @Override + public void hashTableSetup() throws HiveException { + super.hashTableSetup(); + + /* + * Get our Single-Column Long hash map information for this specialized class. + */ + + hashMap = (VectorMapJoinLongHashMap) vectorMapJoinHashTable; + useMinMax = hashMap.useMinMax(); + if (useMinMax) { + min = hashMap.min(); + max = hashMap.max(); + } + } - needHashTableSetup = false; - } + @Override + public void processBatch(VectorizedRowBatch batch) throws HiveException { - batchCounter++; + try { final int inputLogicalSize = batch.size; - if (inputLogicalSize == 0) { - if (LOG.isDebugEnabled()) { - LOG.debug(CLASS_NAME + " batch #" + batchCounter + " empty"); - } - return; - } - // Do the per-batch setup for an outer join. outerPerBatchSetup(batch); @@ -160,9 +143,6 @@ public void process(Object row, int tag) throws HiveException { // later. boolean inputSelectedInUse = batch.selectedInUse; if (inputSelectedInUse) { - // if (!verifyMonotonicallyIncreasing(batch.selected, batch.size)) { - // throw new HiveException("batch.selected is not in sort order and unique"); - // } System.arraycopy(batch.selected, 0, inputSelected, 0, inputLogicalSize); } @@ -174,19 +154,6 @@ public void process(Object row, int tag) throws HiveException { ve.evaluate(batch); } someRowsFilteredOut = (batch.size != inputLogicalSize); - if (LOG.isDebugEnabled()) { - if (batch.selectedInUse) { - if (inputSelectedInUse) { - LOG.debug(CLASS_NAME + - " inputSelected " + intArrayToRangesString(inputSelected, inputLogicalSize) + - " filtered batch.selected " + intArrayToRangesString(batch.selected, batch.size)); - } else { - LOG.debug(CLASS_NAME + - " inputLogicalSize " + inputLogicalSize + - " filtered batch.selected " + intArrayToRangesString(batch.selected, batch.size)); - } - } - } } // Perform any key expressions. Results will go into scratch columns. @@ -234,12 +201,11 @@ public void process(Object row, int tag) throws HiveException { } else { // Handle *repeated* join key, if found. long key = vector[0]; - // LOG.debug(CLASS_NAME + " repeated key " + key); if (useMinMax && (key < min || key > max)) { // Out of range for whole batch. joinResult = JoinUtil.JoinResult.NOMATCH; } else { - joinResult = hashMap.lookup(key, hashMapResults[0]); + joinResult = hashMap.lookup(key, hashMapResults[0], matchTracker); } } @@ -247,9 +213,6 @@ public void process(Object row, int tag) throws HiveException { * Common repeated join result processing. */ - if (LOG.isDebugEnabled()) { - LOG.debug(CLASS_NAME + " batch #" + batchCounter + " repeated joinResult " + joinResult.name()); - } finishOuterRepeated(batch, joinResult, hashMapResults[0], someRowsFilteredOut, inputSelectedInUse, inputLogicalSize); } else { @@ -258,10 +221,6 @@ public void process(Object row, int tag) throws HiveException { * NOT Repeating. */ - if (LOG.isDebugEnabled()) { - LOG.debug(CLASS_NAME + " batch #" + batchCounter + " non-repeated"); - } - int selected[] = batch.selected; boolean selectedInUse = batch.selectedInUse; @@ -286,8 +245,6 @@ public void process(Object row, int tag) throws HiveException { for (int logical = 0; logical < batch.size; logical++) { int batchIndex = (selectedInUse ? 
selected[logical] : logical); - // VectorizedBatchUtil.debugDisplayOneRow(batch, batchIndex, taskName + ", " + getOperatorId() + " candidate " + CLASS_NAME + " batch"); - /* * Single-Column Long outer null detection. */ @@ -305,7 +262,6 @@ public void process(Object row, int tag) throws HiveException { atLeastOneNonMatch = true; - // LOG.debug(CLASS_NAME + " logical " + logical + " batchIndex " + batchIndex + " NULL"); } else { /* @@ -354,11 +310,10 @@ public void process(Object row, int tag) throws HiveException { // Key out of range for whole hash table. saveJoinResult = JoinUtil.JoinResult.NOMATCH; } else { - saveJoinResult = hashMap.lookup(currentKey, hashMapResults[hashMapResultCount]); + saveJoinResult = hashMap.lookup(currentKey, hashMapResults[hashMapResultCount], + matchTracker); } - // LOG.debug(CLASS_NAME + " logical " + logical + " batchIndex " + batchIndex + " New Key " + currentKey + " " + saveJoinResult.name()); - /* * Common outer join result processing. */ @@ -370,7 +325,6 @@ public void process(Object row, int tag) throws HiveException { equalKeySeriesIsSingleValue[equalKeySeriesCount] = hashMapResults[hashMapResultCount].isSingleRow(); equalKeySeriesDuplicateCounts[equalKeySeriesCount] = 1; allMatchs[allMatchCount++] = batchIndex; - // VectorizedBatchUtil.debugDisplayOneRow(batch, batchIndex, CLASS_NAME + " MATCH isSingleValue " + equalKeySeriesIsSingleValue[equalKeySeriesCount] + " currentKey " + currentKey); break; case SPILL: @@ -381,11 +335,9 @@ public void process(Object row, int tag) throws HiveException { case NOMATCH: atLeastOneNonMatch = true; - // VectorizedBatchUtil.debugDisplayOneRow(batch, batchIndex, CLASS_NAME + " NOMATCH" + " currentKey " + currentKey); break; } } else { - // LOG.debug(CLASS_NAME + " logical " + logical + " batchIndex " + batchIndex + " Key Continues " + saveKey + " " + saveJoinResult.name()); // Series of equal keys. @@ -393,7 +345,6 @@ public void process(Object row, int tag) throws HiveException { case MATCH: equalKeySeriesDuplicateCounts[equalKeySeriesCount]++; allMatchs[allMatchCount++] = batchIndex; - // VectorizedBatchUtil.debugDisplayOneRow(batch, batchIndex, CLASS_NAME + " MATCH duplicate"); break; case SPILL: @@ -403,13 +354,9 @@ public void process(Object row, int tag) throws HiveException { break; case NOMATCH: - // VectorizedBatchUtil.debugDisplayOneRow(batch, batchIndex, CLASS_NAME + " NOMATCH duplicate"); break; } } - // if (!verifyMonotonicallyIncreasing(allMatchs, allMatchCount)) { - // throw new HiveException("allMatchs is not in sort order and unique"); - // } } } @@ -451,7 +398,9 @@ public void process(Object row, int tag) throws HiveException { } if (batch.size > 0) { - // Forward any remaining selected rows. + + // Forward any rows in the Big Table batch that had results added (they will be selected). + // NOTE: Other result rows may have been generated in the overflowBatch. 
forwardBigTableBatch(batch); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterMultiKeyOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterMultiKeyOperator.java index 70f88e38ec..29c531bd51 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterMultiKeyOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterMultiKeyOperator.java @@ -31,7 +31,6 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.OperatorDesc; - import org.apache.hadoop.hive.ql.plan.VectorDesc; // Multi-Key hash table import. import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashMap; @@ -69,17 +68,17 @@ protected String getLoggingPrefix() { //--------------------------------------------------------------------------- // The hash map for this specialized class. - private transient VectorMapJoinBytesHashMap hashMap; + protected transient VectorMapJoinBytesHashMap hashMap; //--------------------------------------------------------------------------- // Multi-Key specific members. // // Object that can take a set of columns in row in a vectorized row batch and serialized it. - private transient VectorSerializeRow keyVectorSerializeWrite; + protected transient VectorSerializeRow keyVectorSerializeWrite; // The BinarySortable serialization of the current key. - private transient Output currentKeyOutput; + protected transient Output currentKeyOutput; // The BinarySortable serialization of the saved key for a possible series of equal keys. private transient Output saveKeyOutput; @@ -107,55 +106,40 @@ public VectorMapJoinOuterMultiKeyOperator(CompilationOpContext ctx, OperatorDesc // @Override - public void process(Object row, int tag) throws HiveException { - - try { - VectorizedRowBatch batch = (VectorizedRowBatch) row; + protected void commonSetup() throws HiveException { + super.commonSetup(); - alias = (byte) tag; + /* + * Initialize Multi-Key members for this specialized class. + */ - if (needCommonSetup) { - // Our one time process method initialization. - commonSetup(batch); + keyVectorSerializeWrite = new VectorSerializeRow( + new BinarySortableSerializeWrite(bigTableKeyColumnMap.length)); + keyVectorSerializeWrite.init(bigTableKeyTypeInfos, bigTableKeyColumnMap); - /* - * Initialize Multi-Key members for this specialized class. - */ - - keyVectorSerializeWrite = new VectorSerializeRow( - new BinarySortableSerializeWrite(bigTableKeyColumnMap.length)); - keyVectorSerializeWrite.init(bigTableKeyTypeInfos, bigTableKeyColumnMap); - - currentKeyOutput = new Output(); - saveKeyOutput = new Output(); + currentKeyOutput = new Output(); + saveKeyOutput = new Output(); + } - needCommonSetup = false; - } + @Override + public void hashTableSetup() throws HiveException { + super.hashTableSetup(); - if (needHashTableSetup) { - // Setup our hash table specialization. It will be the first time the process - // method is called, or after a Hybrid Grace reload. + /* + * Get our Multi-Key hash map information for this specialized class. + */ - /* - * Get our Multi-Key hash map information for this specialized class. 
- */ + hashMap = (VectorMapJoinBytesHashMap) vectorMapJoinHashTable; - hashMap = (VectorMapJoinBytesHashMap) vectorMapJoinHashTable; + } - needHashTableSetup = false; - } + @Override + public void processBatch(VectorizedRowBatch batch) throws HiveException { - batchCounter++; + try { final int inputLogicalSize = batch.size; - if (inputLogicalSize == 0) { - if (LOG.isDebugEnabled()) { - LOG.debug(CLASS_NAME + " batch #" + batchCounter + " empty"); - } - return; - } - // Do the per-batch setup for an outer join. outerPerBatchSetup(batch); @@ -165,9 +149,6 @@ public void process(Object row, int tag) throws HiveException { // later. boolean inputSelectedInUse = batch.selectedInUse; if (inputSelectedInUse) { - // if (!verifyMonotonicallyIncreasing(batch.selected, batch.size)) { - // throw new HiveException("batch.selected is not in sort order and unique"); - // } System.arraycopy(batch.selected, 0, inputSelected, 0, inputLogicalSize); } @@ -179,19 +160,6 @@ public void process(Object row, int tag) throws HiveException { ve.evaluate(batch); } someRowsFilteredOut = (batch.size != inputLogicalSize); - if (LOG.isDebugEnabled()) { - if (batch.selectedInUse) { - if (inputSelectedInUse) { - LOG.debug(CLASS_NAME + - " inputSelected " + intArrayToRangesString(inputSelected, inputLogicalSize) + - " filtered batch.selected " + intArrayToRangesString(batch.selected, batch.size)); - } else { - LOG.debug(CLASS_NAME + - " inputLogicalSize " + inputLogicalSize + - " filtered batch.selected " + intArrayToRangesString(batch.selected, batch.size)); - } - } - } } // Perform any key expressions. Results will go into scratch columns. @@ -259,16 +227,13 @@ public void process(Object row, int tag) throws HiveException { keyVectorSerializeWrite.serializeWrite(batch, 0); byte[] keyBytes = currentKeyOutput.getData(); int keyLength = currentKeyOutput.getLength(); - joinResult = hashMap.lookup(keyBytes, 0, keyLength, hashMapResults[0]); + joinResult = hashMap.lookup(keyBytes, 0, keyLength, hashMapResults[0], matchTracker); } /* * Common repeated join result processing. */ - if (LOG.isDebugEnabled()) { - LOG.debug(CLASS_NAME + " batch #" + batchCounter + " repeated joinResult " + joinResult.name()); - } finishOuterRepeated(batch, joinResult, hashMapResults[0], someRowsFilteredOut, inputSelectedInUse, inputLogicalSize); } else { @@ -277,10 +242,6 @@ public void process(Object row, int tag) throws HiveException { * NOT Repeating. */ - if (LOG.isDebugEnabled()) { - LOG.debug(CLASS_NAME + " batch #" + batchCounter + " non-repeated"); - } - int selected[] = batch.selected; boolean selectedInUse = batch.selectedInUse; @@ -305,8 +266,6 @@ public void process(Object row, int tag) throws HiveException { for (int logical = 0; logical < batch.size; logical++) { int batchIndex = (selectedInUse ? selected[logical] : logical); - // VectorizedBatchUtil.debugDisplayOneRow(batch, batchIndex, taskName + ", " + getOperatorId() + " candidate " + CLASS_NAME + " batch"); - /* * Multi-Key outer null detection. 
*/ @@ -325,7 +284,6 @@ public void process(Object row, int tag) throws HiveException { atLeastOneNonMatch = true; - // LOG.debug(CLASS_NAME + " logical " + logical + " batchIndex " + batchIndex + " NULL"); } else { /* @@ -375,7 +333,9 @@ public void process(Object row, int tag) throws HiveException { byte[] keyBytes = saveKeyOutput.getData(); int keyLength = saveKeyOutput.getLength(); - saveJoinResult = hashMap.lookup(keyBytes, 0, keyLength, hashMapResults[hashMapResultCount]); + saveJoinResult = hashMap.lookup(keyBytes, 0, keyLength, + hashMapResults[hashMapResultCount], matchTracker); + /* * Common outer join result processing. @@ -388,7 +348,6 @@ public void process(Object row, int tag) throws HiveException { equalKeySeriesIsSingleValue[equalKeySeriesCount] = hashMapResults[hashMapResultCount].isSingleRow(); equalKeySeriesDuplicateCounts[equalKeySeriesCount] = 1; allMatchs[allMatchCount++] = batchIndex; - // VectorizedBatchUtil.debugDisplayOneRow(batch, batchIndex, CLASS_NAME + " MATCH isSingleValue " + equalKeySeriesIsSingleValue[equalKeySeriesCount] + " currentKey " + currentKey); break; case SPILL: @@ -399,11 +358,9 @@ public void process(Object row, int tag) throws HiveException { case NOMATCH: atLeastOneNonMatch = true; - // VectorizedBatchUtil.debugDisplayOneRow(batch, batchIndex, CLASS_NAME + " NOMATCH" + " currentKey " + currentKey); break; } } else { - // LOG.debug(CLASS_NAME + " logical " + logical + " batchIndex " + batchIndex + " Key Continues " + saveKey + " " + saveJoinResult.name()); // Series of equal keys. @@ -411,7 +368,6 @@ public void process(Object row, int tag) throws HiveException { case MATCH: equalKeySeriesDuplicateCounts[equalKeySeriesCount]++; allMatchs[allMatchCount++] = batchIndex; - // VectorizedBatchUtil.debugDisplayOneRow(batch, batchIndex, CLASS_NAME + " MATCH duplicate"); break; case SPILL: @@ -421,13 +377,9 @@ public void process(Object row, int tag) throws HiveException { break; case NOMATCH: - // VectorizedBatchUtil.debugDisplayOneRow(batch, batchIndex, CLASS_NAME + " NOMATCH duplicate"); break; } } - // if (!verifyMonotonicallyIncreasing(allMatchs, allMatchCount)) { - // throw new HiveException("allMatchs is not in sort order and unique"); - // } } } @@ -469,7 +421,9 @@ public void process(Object row, int tag) throws HiveException { } if (batch.size > 0) { - // Forward any remaining selected rows. + + // Forward any rows in the Big Table batch that had results added (they will be selected). + // NOTE: Other result rows may have been generated in the overflowBatch. forwardBigTableBatch(batch); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterStringOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterStringOperator.java index 714f5ecb59..a19941a633 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterStringOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinOuterStringOperator.java @@ -30,7 +30,6 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.OperatorDesc; - import org.apache.hadoop.hive.ql.plan.VectorDesc; // Single-Column String hash table import. 
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashMap; @@ -65,14 +64,14 @@ protected String getLoggingPrefix() { //--------------------------------------------------------------------------- // The hash map for this specialized class. - private transient VectorMapJoinBytesHashMap hashMap; + protected transient VectorMapJoinBytesHashMap hashMap; //--------------------------------------------------------------------------- // Single-Column String specific members. // // The column number for this one column join specialization. - private transient int singleJoinColumn; + protected transient int singleJoinColumn; //--------------------------------------------------------------------------- // Pass-thru constructors. @@ -97,50 +96,35 @@ public VectorMapJoinOuterStringOperator(CompilationOpContext ctx, OperatorDesc c // @Override - public void process(Object row, int tag) throws HiveException { - - try { - VectorizedRowBatch batch = (VectorizedRowBatch) row; - - alias = (byte) tag; + protected void commonSetup() throws HiveException { + super.commonSetup(); - if (needCommonSetup) { - // Our one time process method initialization. - commonSetup(batch); + /* + * Initialize Single-Column String members for this specialized class. + */ - /* - * Initialize Single-Column String members for this specialized class. - */ - - singleJoinColumn = bigTableKeyColumnMap[0]; + singleJoinColumn = bigTableKeyColumnMap[0]; + } - needCommonSetup = false; - } + @Override + public void hashTableSetup() throws HiveException { + super.hashTableSetup(); - if (needHashTableSetup) { - // Setup our hash table specialization. It will be the first time the process - // method is called, or after a Hybrid Grace reload. + /* + * Get our Single-Column String hash map information for this specialized class. + */ - /* - * Get our Single-Column String hash map information for this specialized class. - */ + hashMap = (VectorMapJoinBytesHashMap) vectorMapJoinHashTable; - hashMap = (VectorMapJoinBytesHashMap) vectorMapJoinHashTable; + } - needHashTableSetup = false; - } + @Override + public void processBatch(VectorizedRowBatch batch) throws HiveException { - batchCounter++; + try { final int inputLogicalSize = batch.size; - if (inputLogicalSize == 0) { - if (LOG.isDebugEnabled()) { - LOG.debug(CLASS_NAME + " batch #" + batchCounter + " empty"); - } - return; - } - // Do the per-batch setup for an outer join. outerPerBatchSetup(batch); @@ -150,33 +134,17 @@ public void process(Object row, int tag) throws HiveException { // later. boolean inputSelectedInUse = batch.selectedInUse; if (inputSelectedInUse) { - // if (!verifyMonotonicallyIncreasing(batch.selected, batch.size)) { - // throw new HiveException("batch.selected is not in sort order and unique"); - // } System.arraycopy(batch.selected, 0, inputSelected, 0, inputLogicalSize); } // Filtering for outer join just removes rows available for hash table matching. 
- boolean someRowsFilteredOut = false; + boolean someRowsFilteredOut = false; if (bigTableFilterExpressions.length > 0) { // Since the input for (VectorExpression ve : bigTableFilterExpressions) { ve.evaluate(batch); } someRowsFilteredOut = (batch.size != inputLogicalSize); - if (LOG.isDebugEnabled()) { - if (batch.selectedInUse) { - if (inputSelectedInUse) { - LOG.debug(CLASS_NAME + - " inputSelected " + intArrayToRangesString(inputSelected, inputLogicalSize) + - " filtered batch.selected " + intArrayToRangesString(batch.selected, batch.size)); - } else { - LOG.debug(CLASS_NAME + - " inputLogicalSize " + inputLogicalSize + - " filtered batch.selected " + intArrayToRangesString(batch.selected, batch.size)); - } - } - } } // Perform any key expressions. Results will go into scratch columns. @@ -228,7 +196,8 @@ public void process(Object row, int tag) throws HiveException { byte[] keyBytes = vector[0]; int keyStart = start[0]; int keyLength = length[0]; - joinResult = hashMap.lookup(keyBytes, keyStart, keyLength, hashMapResults[0]); + joinResult = hashMap.lookup( + keyBytes, keyStart, keyLength, hashMapResults[0], matchTracker); } /* @@ -246,10 +215,6 @@ public void process(Object row, int tag) throws HiveException { * NOT Repeating. */ - if (LOG.isDebugEnabled()) { - LOG.debug(CLASS_NAME + " batch #" + batchCounter + " non-repeated"); - } - int selected[] = batch.selected; boolean selectedInUse = batch.selectedInUse; @@ -274,8 +239,6 @@ public void process(Object row, int tag) throws HiveException { for (int logical = 0; logical < batch.size; logical++) { int batchIndex = (selectedInUse ? selected[logical] : logical); - // VectorizedBatchUtil.debugDisplayOneRow(batch, batchIndex, taskName + ", " + getOperatorId() + " candidate " + CLASS_NAME + " batch"); - /* * Single-Column String outer null detection. */ @@ -293,7 +256,6 @@ public void process(Object row, int tag) throws HiveException { atLeastOneNonMatch = true; - // LOG.debug(CLASS_NAME + " logical " + logical + " batchIndex " + batchIndex + " NULL"); } else { /* @@ -343,7 +305,8 @@ public void process(Object row, int tag) throws HiveException { byte[] keyBytes = vector[batchIndex]; int keyStart = start[batchIndex]; int keyLength = length[batchIndex]; - saveJoinResult = hashMap.lookup(keyBytes, keyStart, keyLength, hashMapResults[hashMapResultCount]); + saveJoinResult = hashMap.lookup(keyBytes, keyStart, keyLength, + hashMapResults[hashMapResultCount], matchTracker); /* * Common outer join result processing. @@ -356,7 +319,6 @@ public void process(Object row, int tag) throws HiveException { equalKeySeriesIsSingleValue[equalKeySeriesCount] = hashMapResults[hashMapResultCount].isSingleRow(); equalKeySeriesDuplicateCounts[equalKeySeriesCount] = 1; allMatchs[allMatchCount++] = batchIndex; - // VectorizedBatchUtil.debugDisplayOneRow(batch, batchIndex, CLASS_NAME + " MATCH isSingleValue " + equalKeySeriesIsSingleValue[equalKeySeriesCount] + " currentKey " + currentKey); break; case SPILL: @@ -367,11 +329,9 @@ public void process(Object row, int tag) throws HiveException { case NOMATCH: atLeastOneNonMatch = true; - // VectorizedBatchUtil.debugDisplayOneRow(batch, batchIndex, CLASS_NAME + " NOMATCH" + " currentKey " + currentKey); break; } } else { - // LOG.debug(CLASS_NAME + " logical " + logical + " batchIndex " + batchIndex + " Key Continues " + saveKey + " " + saveJoinResult.name()); // Series of equal keys. 
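
The change running through all three outer operators above is the FULL OUTER probe/close protocol this patch introduces: every hash map lookup is now threaded with matchTracker so matched hash table slots get recorded, and at operator close only the never-matched Small Table entries are walked. A condensed sketch of the protocol using the interfaces added in this patch (variable wiring abbreviated):

    // Probe side: each lookup records its matched slot in the tracker.
    MatchTracker matchTracker = vectorMapJoinHashTable.createMatchTracker();
    JoinUtil.JoinResult joinResult =
        hashMap.lookup(keyBytes, keyStart, keyLength, hashMapResults[0], matchTracker);

    // Close side: iterate only the slots the tracker never saw, emitting
    // Small Table keys/values with the Big Table columns marked NULL.
    VectorMapJoinNonMatchedIterator nonMatchedIterator =
        hashMap.createNonMatchedIterator(matchTracker);
    nonMatchedIterator.init();
    while (nonMatchedIterator.findNextNonMatched()) {
      VectorMapJoinHashMapResult result = nonMatchedIterator.getNonMatchedHashMapResult();
      // ... deserialize the key/values and forward via the overflow batch ...
    }
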
@@ -379,7 +339,6 @@ public void process(Object row, int tag) throws HiveException { case MATCH: equalKeySeriesDuplicateCounts[equalKeySeriesCount]++; allMatchs[allMatchCount++] = batchIndex; - // VectorizedBatchUtil.debugDisplayOneRow(batch, batchIndex, CLASS_NAME + " MATCH duplicate"); break; case SPILL: @@ -389,13 +348,9 @@ public void process(Object row, int tag) throws HiveException { break; case NOMATCH: - // VectorizedBatchUtil.debugDisplayOneRow(batch, batchIndex, CLASS_NAME + " NOMATCH duplicate"); break; } } - // if (!verifyMonotonicallyIncreasing(allMatchs, allMatchCount)) { - // throw new HiveException("allMatchs is not in sort order and unique"); - // } } } @@ -437,7 +392,9 @@ public void process(Object row, int tag) throws HiveException { } if (batch.size > 0) { - // Forward any remaining selected rows. + + // Forward any rows in the Big Table batch that had results added (they will be selected). + // NOTE: Other result rows may have been generated in the overflowBatch. forwardBigTableBatch(batch); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMap.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMap.java index 59694602ea..add8b9c997 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMap.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMap.java @@ -19,8 +19,13 @@ package org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast; import org.apache.hadoop.hive.ql.exec.JoinUtil; +import org.apache.hadoop.hive.ql.exec.persistence.MatchTracker; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashMap; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMapResult; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinNonMatchedIterator; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.serde2.WriteBuffers; +import org.apache.hadoop.hive.serde2.WriteBuffers.ByteSegmentRef; import org.apache.hadoop.io.BytesWritable; import org.apache.hive.common.util.HashCodeUtil; import org.slf4j.Logger; @@ -41,11 +46,112 @@ protected BytesWritable testValueBytesWritable; + private long fullOuterNullKeyRefWord; + + private static class NonMatchedBytesHashMapIterator extends VectorMapJoinFastNonMatchedIterator { + + private VectorMapJoinFastBytesHashMap hashMap; + + private boolean noMore; + private boolean keyIsNull; + + private WriteBuffers.Position nonMatchedReadPos; + + private ByteSegmentRef nonMatchedKeyByteSegmentRef; + + private VectorMapJoinFastBytesHashMapStore.HashMapResult nonMatchedHashMapResult; + + NonMatchedBytesHashMapIterator(MatchTracker matchTracker, + VectorMapJoinFastBytesHashMap hashMap) { + super(matchTracker); + this.hashMap = hashMap; + } + + @Override + public void init() { + super.init(); + noMore = false; + keyIsNull = false; + nonMatchedReadPos = new WriteBuffers.Position(); + nonMatchedKeyByteSegmentRef = new ByteSegmentRef(); + nonMatchedHashMapResult = new VectorMapJoinFastBytesHashMapStore.HashMapResult(); + } + + @Override + public boolean findNextNonMatched() { + if (noMore) { + return false; + } + while (true) { + nonMatchedLogicalSlotNum++; + if (nonMatchedLogicalSlotNum >= hashMap.logicalHashBucketCount) { + + // Fall below and handle Small Table NULL key. 
+ break; + } + final long refWord = hashMap.slots[nonMatchedLogicalSlotNum]; + if (refWord != 0) { + if (!matchTracker.wasMatched(nonMatchedLogicalSlotNum)) { + nonMatchedHashMapResult.set(hashMap.hashMapStore, refWord); + keyIsNull = false; + return true; + } + } + } + + // Do we have a Small Table NULL Key? + if (hashMap.fullOuterNullKeyRefWord == 0) { + return false; + } + nonMatchedHashMapResult.set(hashMap.hashMapStore, hashMap.fullOuterNullKeyRefWord); + noMore = true; + keyIsNull = true; + return true; + } + + @Override + public boolean readNonMatchedBytesKey() throws HiveException { + if (keyIsNull) { + return false; + } + hashMap.hashMapStore.getKey( + hashMap.slots[nonMatchedLogicalSlotNum], + nonMatchedKeyByteSegmentRef, + nonMatchedReadPos); + return true; + } + + @Override + public byte[] getNonMatchedBytes() { + return nonMatchedKeyByteSegmentRef.getBytes(); + } + + @Override + public int getNonMatchedBytesOffset() { + return (int) nonMatchedKeyByteSegmentRef.getOffset(); + } + + @Override + public int getNonMatchedBytesLength() { + return nonMatchedKeyByteSegmentRef.getLength(); + } + + @Override + public VectorMapJoinHashMapResult getNonMatchedHashMapResult() { + return nonMatchedHashMapResult; + } + } + @Override public VectorMapJoinHashMapResult createHashMapResult() { return new VectorMapJoinFastBytesHashMapStore.HashMapResult(); } + @Override + public VectorMapJoinNonMatchedIterator createNonMatchedIterator(MatchTracker matchTracker) { + return new NonMatchedBytesHashMapIterator(matchTracker, this); + } + public void add(byte[] keyBytes, int keyStart, int keyLength, BytesWritable currentValue) { if (resizeThreshold <= keysAssigned) { @@ -123,7 +229,28 @@ public void add(byte[] keyBytes, int keyStart, int keyLength, BytesWritable curr return fastHashMapResult.joinResult(); } - protected final void doHashMapMatch( + @Override + public JoinUtil.JoinResult lookup(byte[] keyBytes, int keyStart, int keyLength, + VectorMapJoinHashMapResult hashMapResult, MatchTracker matchTracker) { + + VectorMapJoinFastBytesHashMapStore.HashMapResult fastHashMapResult = + (VectorMapJoinFastBytesHashMapStore.HashMapResult) hashMapResult; + + fastHashMapResult.forget(); + + long hashCode = HashCodeUtil.murmurHash(keyBytes, keyStart, keyLength); + + final int slot = + doHashMapMatch( + keyBytes, keyStart, keyLength, hashCode, fastHashMapResult); + if (slot != -1 && matchTracker != null) { + matchTracker.trackMatch(slot); + } + + return fastHashMapResult.joinResult(); + } + + protected final int doHashMapMatch( byte[] keyBytes, int keyStart, int keyLength, long hashCode, VectorMapJoinFastBytesHashMapStore.HashMapResult fastHashMapResult) { @@ -138,7 +265,7 @@ protected final void doHashMapMatch( if (refWord == 0) { // Given that we do not delete, an empty slot means no match. - return; + return -1; } else if ( VectorMapJoinFastBytesHashKeyRef.getPartialHashCodeFromRefWord(refWord) == partialHashCode) { @@ -148,22 +275,47 @@ protected final void doHashMapMatch( fastHashMapResult.setKey(hashMapStore, refWord); if (fastHashMapResult.equalKey(keyBytes, keyStart, keyLength)) { fastHashMapResult.setMatch(); - return; + return slot; } } // Some other key (collision) - keep probing. probeSlot += (++i); if (i > largestNumberOfSteps) { // We know we never went that far when we were inserting. 
- return; + return -1; } slot = (int) (probeSlot & logicalHashBucketMask); } } + private static final byte[] EMPTY_BYTES = new byte[0]; + + public void addFullOuterNullKeyValue(BytesWritable currentValue) { + + byte[] valueBytes = currentValue.getBytes(); + int valueLength = currentValue.getLength(); + + if (fullOuterNullKeyRefWord == 0) { + fullOuterNullKeyRefWord = + hashMapStore.addFirst( + /* partialHashCode */ 0, EMPTY_BYTES, 0, 0, + valueBytes, 0, valueLength); + } else { + + // Add another value. + fullOuterNullKeyRefWord = + hashMapStore.addMore( + fullOuterNullKeyRefWord, valueBytes, 0, valueLength, unsafeReadPos); + } + } + public VectorMapJoinFastBytesHashMap( + boolean isFullOuter, int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { - super(initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); + super( + isFullOuter, + initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); + fullOuterNullKeyRefWord = 0; hashMapStore = new VectorMapJoinFastBytesHashMapStore(writeBuffersSize); writeBuffers = hashMapStore.getWriteBuffers(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMapStore.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMapStore.java index dda4a8555a..b71ebb6244 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMapStore.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMapStore.java @@ -205,8 +205,7 @@ public HashMapResult() { /** * Setup for reading the key of an entry with the equalKey method. * @param hashMapStore - * @param part1Word - * @param part2Word + * @param refWord */ public void setKey(VectorMapJoinFastBytesHashMapStore hashMapStore, long refWord) { @@ -280,6 +279,16 @@ public void setMatch() { setJoinResult(JoinResult.MATCH); } + /** + * Setup for a match outright. + * @param hashMapStore + * @param refWord + */ + public void set(VectorMapJoinFastBytesHashMapStore hashMapStore, long refWord) { + setKey(hashMapStore, refWord); + setMatch(); + } + @Override public boolean hasRows() { return hasRows; @@ -546,6 +555,23 @@ public long addMore(long refWord, byte[] valueBytes, int valueStart, int valueLe return refWord; } + public void getKey(long refWord, ByteSegmentRef keyByteSegmentRef, + WriteBuffers.Position readPos) { + + final long absoluteOffset = KeyRef.getAbsoluteOffset(refWord); + + writeBuffers.setReadPoint(absoluteOffset, readPos); + + int keyLength = KeyRef.getSmallKeyLength(refWord); + boolean isKeyLengthSmall = (keyLength != KeyRef.SmallKeyLength.allBitsOn); + if (!isKeyLengthSmall) { + + // Read big key length we wrote with the key. 
+ keyLength = writeBuffers.readVInt(readPos); + } + writeBuffers.getByteSegmentRefToCurrent(keyByteSegmentRef, keyLength, readPos); + } + public VectorMapJoinFastBytesHashMapStore(int writeBuffersSize) { writeBuffers = new WriteBuffers(writeBuffersSize, KeyRef.AbsoluteOffset.maxSize); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMultiSet.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMultiSet.java index 849eeb427d..5ec90b40b9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMultiSet.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashMultiSet.java @@ -162,8 +162,11 @@ protected final void doHashMultiSetContains( } public VectorMapJoinFastBytesHashMultiSet( + boolean isFullOuter, int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { - super(initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); + super( + isFullOuter, + initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); hashMultiSetStore = new VectorMapJoinFastBytesHashMultiSetStore(writeBuffersSize); writeBuffers = hashMultiSetStore.getWriteBuffers(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashSet.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashSet.java index 737b4d0b42..7c73aa6f9c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashSet.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashSet.java @@ -153,8 +153,11 @@ protected final void doHashSetContains( } public VectorMapJoinFastBytesHashSet( + boolean isFullOuter, int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { - super(initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); + super( + isFullOuter, + initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); hashSetStore = new VectorMapJoinFastBytesHashSetStore(writeBuffersSize); writeBuffers = hashSetStore.getWriteBuffers(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashTable.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashTable.java index 223eec3e8d..3d45a54728 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashTable.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastBytesHashTable.java @@ -128,8 +128,11 @@ private void allocateBucketArray() { } public VectorMapJoinFastBytesHashTable( - int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { - super(initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); + boolean isFullOuter, + int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { + super( + isFullOuter, + initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); unsafeReadPos = new WriteBuffers.Position(); allocateBucketArray(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashMap.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashMap.java index 3e91667c20..806e075fdf 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashMap.java 
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashMap.java @@ -31,8 +31,10 @@ public VectorMapJoinHashMapResult createHashMapResult() { } public VectorMapJoinFastHashMap( - boolean isOuterJoin, - int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { - super(initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); + boolean isFullOuter, + int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { + super( + isFullOuter, + initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); } } \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashMultiSet.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashMultiSet.java index fa3d548f00..338a360977 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashMultiSet.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashMultiSet.java @@ -41,8 +41,10 @@ public void set(long count) { } public VectorMapJoinFastHashMultiSet( - boolean isOuterJoin, - int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { - super(initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); + boolean isFullOuter, + int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { + super( + isFullOuter, + initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); } } \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashSet.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashSet.java index 4f905643ca..9fdcad7f9e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashSet.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashSet.java @@ -37,8 +37,10 @@ public VectorMapJoinHashSetResult createHashSetResult() { } public VectorMapJoinFastHashSet( - boolean isOuterJoin, + boolean isFullOuter, int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { - super(initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); + super( + isFullOuter, + initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); } } \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashTable.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashTable.java index cbcc9b1ba5..2d05eab6bd 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashTable.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastHashTable.java @@ -22,15 +22,19 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.mapjoin.MapJoinMemoryExhaustionError; +import org.apache.hadoop.hive.ql.exec.persistence.MatchTracker; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashTable; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinNonMatchedIterator; public abstract class VectorMapJoinFastHashTable implements VectorMapJoinHashTable { public static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinFastHashTable.class); + protected final boolean isFullOuter; + protected int 
logicalHashBucketCount; protected int logicalHashBucketMask; - protected float loadFactor; + protected final float loadFactor; protected final int writeBuffersSize; protected long estimatedKeyCount; @@ -69,7 +73,10 @@ private static int nextHighestPowerOfTwo(int v) { } public VectorMapJoinFastHashTable( - int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { + boolean isFullOuter, + int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { + + this.isFullOuter = isFullOuter; initialCapacity = (Long.bitCount(initialCapacity) == 1) ? initialCapacity : nextHighestPowerOfTwo(initialCapacity); @@ -93,7 +100,27 @@ public int size() { @Override public long getEstimatedMemorySize() { + int size = 0; JavaDataModel jdm = JavaDataModel.get(); - return JavaDataModel.alignUp(10L * jdm.primitive1() + jdm.primitive2(), jdm.memoryAlign()); + size += JavaDataModel.alignUp(10L * jdm.primitive1() + jdm.primitive2(), jdm.memoryAlign()); + if (isFullOuter) { + size += MatchTracker.calculateEstimatedMemorySize(logicalHashBucketCount); + } + return size; + } + + @Override + public MatchTracker createMatchTracker() { + return MatchTracker.create(logicalHashBucketCount); + } + + @Override + public VectorMapJoinNonMatchedIterator createNonMatchedIterator(MatchTracker matchTracker) { + throw new RuntimeException("Not implemented"); + } + + @Override + public int spillPartitionId() { + throw new RuntimeException("Not implemented"); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastKeyStore.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastKeyStore.java index b6684e0468..a7e9739f52 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastKeyStore.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastKeyStore.java @@ -22,6 +22,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.serde2.WriteBuffers; +import org.apache.hadoop.hive.serde2.WriteBuffers.ByteSegmentRef; // Optimized for sequential key lookup. @@ -124,13 +125,11 @@ public boolean unsafeEqualKey(long keyRefWord, byte[] keyBytes, int keyStart, in public boolean equalKey(long keyRefWord, byte[] keyBytes, int keyStart, int keyLength, WriteBuffers.Position readPos) { - int storedKeyLengthLength = + int storedKeyLength = (int) ((keyRefWord & SmallKeyLength.bitMask) >> SmallKeyLength.bitShift); - boolean isKeyLengthSmall = (storedKeyLengthLength != SmallKeyLength.allBitsOn); + boolean isKeyLengthSmall = (storedKeyLength != SmallKeyLength.allBitsOn); - // LOG.debug("VectorMapJoinFastKeyStore equalKey keyLength " + keyLength + " isKeyLengthSmall " + isKeyLengthSmall + " storedKeyLengthLength " + storedKeyLengthLength + " keyRefWord " + Long.toHexString(keyRefWord)); - - if (isKeyLengthSmall && storedKeyLengthLength != keyLength) { + if (isKeyLengthSmall && storedKeyLength != keyLength) { return false; } long absoluteKeyOffset = @@ -139,16 +138,14 @@ public boolean equalKey(long keyRefWord, byte[] keyBytes, int keyStart, int keyL writeBuffers.setReadPoint(absoluteKeyOffset, readPos); if (!isKeyLengthSmall) { // Read big value length we wrote with the value. 
- storedKeyLengthLength = writeBuffers.readVInt(readPos); - if (storedKeyLengthLength != keyLength) { - // LOG.debug("VectorMapJoinFastKeyStore equalKey no match big length"); + storedKeyLength = writeBuffers.readVInt(readPos); + if (storedKeyLength != keyLength) { return false; } } // Our reading is positioned to the key. if (!writeBuffers.isEqual(keyBytes, keyStart, readPos, keyLength)) { - // LOG.debug("VectorMapJoinFastKeyStore equalKey no match on bytes"); return false; } @@ -167,6 +164,25 @@ public VectorMapJoinFastKeyStore(WriteBuffers writeBuffers) { unsafeReadPos = new WriteBuffers.Position(); } + public void getKey(long keyRefWord, ByteSegmentRef keyByteSegmentRef, + WriteBuffers.Position readPos) { + + int storedKeyLength = + (int) ((keyRefWord & SmallKeyLength.bitMask) >> SmallKeyLength.bitShift); + boolean isKeyLengthSmall = (storedKeyLength != SmallKeyLength.allBitsOn); + + long absoluteKeyOffset = + (keyRefWord & AbsoluteKeyOffset.bitMask); + + writeBuffers.setReadPoint(absoluteKeyOffset, readPos); + if (!isKeyLengthSmall) { + + // Read big key length we wrote with the key. + storedKeyLength = writeBuffers.readVInt(readPos); + } + writeBuffers.getByteSegmentRefToCurrent(keyByteSegmentRef, storedKeyLength, readPos); + } + @Override public long getEstimatedMemorySize() { long size = 0; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashMap.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashMap.java index f42430dbf8..a4cda921a5 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashMap.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashMap.java @@ -22,13 +22,17 @@ import org.apache.hadoop.hive.common.MemoryEstimate; import org.apache.hadoop.hive.ql.util.JavaDataModel; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +// import org.slf4j.Logger; +// import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.JoinUtil; +import org.apache.hadoop.hive.ql.exec.persistence.MatchTracker; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMapResult; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinLongHashMap; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinNonMatchedIterator; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableKeyType; +import org.apache.hadoop.hive.serde2.WriteBuffers; +import org.apache.hadoop.hive.serde2.WriteBuffers.ByteSegmentRef; import org.apache.hadoop.io.BytesWritable; import org.apache.hive.common.util.HashCodeUtil; @@ -41,17 +45,115 @@ extends VectorMapJoinFastLongHashTable implements VectorMapJoinLongHashMap, MemoryEstimate { - public static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinFastLongHashMap.class); + // public static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinFastLongHashMap.class); protected VectorMapJoinFastValueStore valueStore; private BytesWritable testValueBytesWritable; + private long fullOuterNullKeyValueRef; + + private static class NonMatchedLongHashMapIterator extends VectorMapJoinFastNonMatchedIterator { + + private VectorMapJoinFastLongHashMap hashMap; + + private boolean noMore; + private boolean keyIsNull; + + private WriteBuffers.Position nonMatchedReadPos; + + private ByteSegmentRef nonMatchedKeyByteSegmentRef; + + private 
VectorMapJoinFastValueStore.HashMapResult nonMatchedHashMapResult; + + NonMatchedLongHashMapIterator(MatchTracker matchTracker, + VectorMapJoinFastLongHashMap hashMap) { + super(matchTracker); + this.hashMap = hashMap; + } + + @Override + public void init() { + super.init(); + noMore = false; + keyIsNull = false; + nonMatchedHashMapResult = new VectorMapJoinFastValueStore.HashMapResult(); + } + + @Override + public boolean findNextNonMatched() { + if (noMore) { + return false; + } + while (true) { + nonMatchedLogicalSlotNum++; + if (nonMatchedLogicalSlotNum >= hashMap.logicalHashBucketCount){ + + // Fall below and handle Small Table NULL key. + break; + } + final int nonMatchedDoubleIndex = nonMatchedLogicalSlotNum * 2; + if (hashMap.slotPairs[nonMatchedDoubleIndex] != 0) { + if (!matchTracker.wasMatched(nonMatchedLogicalSlotNum)) { + nonMatchedHashMapResult.set( + hashMap.valueStore, hashMap.slotPairs[nonMatchedDoubleIndex]); + keyIsNull = false; + return true; + } + } + } + + // Do we have a Small Table NULL Key? + if (hashMap.fullOuterNullKeyValueRef == 0) { + return false; + } + nonMatchedHashMapResult.set( + hashMap.valueStore, hashMap.fullOuterNullKeyValueRef); + noMore = true; + keyIsNull = true; + return true; + } + + @Override + public boolean readNonMatchedLongKey() { + return !keyIsNull; + } + + @Override + public long getNonMatchedLongKey() { + return hashMap.slotPairs[nonMatchedLogicalSlotNum * 2 + 1]; + } + + @Override + public VectorMapJoinHashMapResult getNonMatchedHashMapResult() { + return nonMatchedHashMapResult; + } + } + @Override public VectorMapJoinHashMapResult createHashMapResult() { return new VectorMapJoinFastValueStore.HashMapResult(); } + @Override + public VectorMapJoinNonMatchedIterator createNonMatchedIterator(MatchTracker matchTracker) { + return new NonMatchedLongHashMapIterator(matchTracker, this); + } + + @Override + public void putRow(BytesWritable currentKey, BytesWritable currentValue) + throws HiveException, IOException { + + if (!adaptPutRow(currentKey, currentValue)) { + + // Ignore NULL keys, except for FULL OUTER. + if (isFullOuter) { + addFullOuterNullKeyValue(currentValue); + } + + } + } + /* * A Unit Test convenience method for putting key and value into the hash table using the * actual types. 
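[Reviewer note] The hunks above carry the core FULL OUTER pattern for the fast long hash map: putRow retains NULL small-table keys in fullOuterNullKeyValueRef instead of dropping them, the tracked lookup records each matched slot, and NonMatchedLongHashMapIterator walks every slot pair the tracker never saw, emitting the retained NULL-key values last. A minimal standalone sketch of that pattern follows, using plain java.util collections and illustrative names (SimpleFullOuterMap, nullKeyValues, forEachNonMatched) rather than Hive's classes:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Illustrative sketch only; not the Hive implementation.
public class SimpleFullOuterMap {

  private final Map<Long, List<String>> slots = new HashMap<>();
  private final Map<Long, Boolean> matchTracker = new HashMap<>();

  // Like fullOuterNullKeyValueRef: NULL keys are retained for FULL OUTER
  // instead of being ignored as they are for other join types.
  private final List<String> nullKeyValues = new ArrayList<>();

  public void putRow(Long key, String value) {
    if (key == null) {
      nullKeyValues.add(value);        // addFullOuterNullKeyValue analogue
      return;
    }
    slots.computeIfAbsent(key, k -> new ArrayList<>()).add(value);
  }

  // Lookup with match tracking, analogous to lookup(key, result, matchTracker).
  public List<String> lookup(long key) {
    List<String> values = slots.get(key);
    if (values != null) {
      matchTracker.put(key, Boolean.TRUE);   // trackMatch(pairIndex / 2) analogue
    }
    return values;
  }

  // Post-pass: emit small-table rows no big-table key ever matched,
  // then the retained NULL-key rows, as findNextNonMatched() does.
  public void forEachNonMatched(java.util.function.BiConsumer<Long, List<String>> emit) {
    for (Map.Entry<Long, List<String>> e : slots.entrySet()) {
      if (!matchTracker.containsKey(e.getKey())) {
        emit.accept(e.getKey(), e.getValue());
      }
    }
    if (!nullKeyValues.isEmpty()) {
      emit.accept(null, nullKeyValues);      // keyIsNull == true case
    }
  }

  public static void main(String[] args) {
    SimpleFullOuterMap map = new SimpleFullOuterMap();
    map.putRow(1L, "a");
    map.putRow(2L, "b");
    map.putRow(null, "c");
    map.lookup(1L);                          // only key 1 is matched
    map.forEachNonMatched((k, v) -> System.out.println(k + " -> " + v));
    // prints: 2 -> [b] and null -> [c]
  }
}

In the patch itself the tracker is a flat structure indexed by logical slot (pairIndex / 2) and is created per probe stream because, as the new interface javadoc below notes, the hash table can be shared while each MatchTracker must stay private.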
@@ -91,13 +193,12 @@ public void assignSlot(int slot, long key, boolean isNewKey, BytesWritable curre optimizedHashMapResult.forget(); long hashCode = HashCodeUtil.calculateLongHashCode(key); - // LOG.debug("VectorMapJoinFastLongHashMap lookup " + key + " hashCode " + hashCode); - long valueRef = findReadSlot(key, hashCode); + int pairIndex = findReadSlot(key, hashCode); JoinUtil.JoinResult joinResult; - if (valueRef == -1) { + if (pairIndex == -1) { joinResult = JoinUtil.JoinResult.NOMATCH; } else { - optimizedHashMapResult.set(valueStore, valueRef); + optimizedHashMapResult.set(valueStore, slotPairs[pairIndex]); joinResult = JoinUtil.JoinResult.MATCH; } @@ -107,12 +208,59 @@ public void assignSlot(int slot, long key, boolean isNewKey, BytesWritable curre return joinResult; } + @Override + public JoinUtil.JoinResult lookup(long key, VectorMapJoinHashMapResult hashMapResult, + MatchTracker matchTracker) { + + VectorMapJoinFastValueStore.HashMapResult optimizedHashMapResult = + (VectorMapJoinFastValueStore.HashMapResult) hashMapResult; + + optimizedHashMapResult.forget(); + + long hashCode = HashCodeUtil.calculateLongHashCode(key); + int pairIndex = findReadSlot(key, hashCode); + JoinUtil.JoinResult joinResult; + if (pairIndex == -1) { + joinResult = JoinUtil.JoinResult.NOMATCH; + } else { + if (matchTracker != null) { + matchTracker.trackMatch(pairIndex / 2); + } + optimizedHashMapResult.set(valueStore, slotPairs[pairIndex]); + + joinResult = JoinUtil.JoinResult.MATCH; + } + + optimizedHashMapResult.setJoinResult(joinResult); + + return joinResult; + } + + public void addFullOuterNullKeyValue(BytesWritable currentValue) { + + byte[] valueBytes = currentValue.getBytes(); + int valueLength = currentValue.getLength(); + + if (fullOuterNullKeyValueRef == 0) { + fullOuterNullKeyValueRef = valueStore.addFirst(valueBytes, 0, valueLength); + } else { + + // Add another value. 
+ fullOuterNullKeyValueRef = + valueStore.addMore(fullOuterNullKeyValueRef, valueBytes, 0, valueLength); + } + } + public VectorMapJoinFastLongHashMap( - boolean minMaxEnabled, boolean isOuterJoin, HashTableKeyType hashTableKeyType, + boolean isFullOuter, + boolean minMaxEnabled, + HashTableKeyType hashTableKeyType, int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { - super(minMaxEnabled, isOuterJoin, hashTableKeyType, + super( + isFullOuter, minMaxEnabled, hashTableKeyType, initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); valueStore = new VectorMapJoinFastValueStore(writeBuffersSize); + fullOuterNullKeyValueRef = 0; } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashMultiSet.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashMultiSet.java index 228fa72602..43f093d906 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashMultiSet.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashMultiSet.java @@ -42,11 +42,27 @@ public static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinFastLongHashMultiSet.class); + private long fullOuterNullKeyValueCount; + @Override public VectorMapJoinHashMultiSetResult createHashMultiSetResult() { return new VectorMapJoinFastHashMultiSet.HashMultiSetResult(); } + @Override + public void putRow(BytesWritable currentKey, BytesWritable currentValue) + throws HiveException, IOException { + + if (!adaptPutRow(currentKey, currentValue)) { + + // Ignore NULL keys, except for FULL OUTER. + if (isFullOuter) { + fullOuterNullKeyValueCount++; + } + + } + } + /* * A Unit Test convenience method for putting the key into the hash table using the * actual type. @@ -80,12 +96,19 @@ public void assignSlot(int slot, long key, boolean isNewKey, BytesWritable curre optimizedHashMultiSetResult.forget(); long hashCode = HashCodeUtil.calculateLongHashCode(key); - long count = findReadSlot(key, hashCode); + int pairIndex = findReadSlot(key, hashCode); JoinUtil.JoinResult joinResult; - if (count == -1) { + if (pairIndex == -1) { joinResult = JoinUtil.JoinResult.NOMATCH; } else { - optimizedHashMultiSetResult.set(count); + /* + * NOTE: Support for trackMatched not needed yet for Set. 
+ + if (matchTracker != null) { + matchTracker.trackMatch(pairIndex / 2); + } + */ + optimizedHashMultiSetResult.set(slotPairs[pairIndex]); joinResult = JoinUtil.JoinResult.MATCH; } @@ -95,10 +118,15 @@ public void assignSlot(int slot, long key, boolean isNewKey, BytesWritable curre } public VectorMapJoinFastLongHashMultiSet( - boolean minMaxEnabled, boolean isOuterJoin, HashTableKeyType hashTableKeyType, + boolean isFullOuter, + boolean minMaxEnabled, + HashTableKeyType hashTableKeyType, int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { - super(minMaxEnabled, isOuterJoin, hashTableKeyType, + super( + isFullOuter, + minMaxEnabled, hashTableKeyType, initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); + fullOuterNullKeyValueCount = 0; } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashSet.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashSet.java index 4c049cb03b..8dce5b82d3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashSet.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashSet.java @@ -47,6 +47,14 @@ public VectorMapJoinHashSetResult createHashSetResult() { return new VectorMapJoinFastHashSet.HashSetResult(); } + @Override + public void putRow(BytesWritable currentKey, BytesWritable currentValue) + throws HiveException, IOException { + + // Ignore NULL keys (HashSet not used for FULL OUTER). + adaptPutRow(currentKey, currentValue); + } + /* * A Unit Test convenience method for putting the key into the hash table using the * actual type. @@ -76,11 +84,18 @@ public JoinResult contains(long key, VectorMapJoinHashSetResult hashSetResult) { optimizedHashSetResult.forget(); long hashCode = HashCodeUtil.calculateLongHashCode(key); - long existance = findReadSlot(key, hashCode); + int pairIndex = findReadSlot(key, hashCode); JoinUtil.JoinResult joinResult; - if (existance == -1) { + if (pairIndex == -1) { joinResult = JoinUtil.JoinResult.NOMATCH; } else { + /* + * NOTE: Support for trackMatched not needed yet for Set. 
+ + if (matchTracker != null) { + matchTracker.trackMatch(pairIndex / 2); + } + */ joinResult = JoinUtil.JoinResult.MATCH; } @@ -91,9 +106,13 @@ public JoinResult contains(long key, VectorMapJoinHashSetResult hashSetResult) { } public VectorMapJoinFastLongHashSet( - boolean minMaxEnabled, boolean isOuterJoin, HashTableKeyType hashTableKeyType, + boolean isFullOuter, + boolean minMaxEnabled, + HashTableKeyType hashTableKeyType, int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { - super(minMaxEnabled, isOuterJoin, hashTableKeyType, + super( + isFullOuter, + minMaxEnabled, hashTableKeyType, initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashTable.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashTable.java index c9c3e808ef..03ef249241 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashTable.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastLongHashTable.java @@ -49,8 +49,6 @@ private final HashTableKeyType hashTableKeyType; - private final boolean isOuterJoin; - private final BinarySortableDeserializeRead keyBinarySortableDeserializeRead; private final boolean useMinMax; @@ -72,14 +70,13 @@ public long max() { return max; } - @Override - public void putRow(BytesWritable currentKey, BytesWritable currentValue) throws HiveException, IOException { + public boolean adaptPutRow(BytesWritable currentKey, BytesWritable currentValue) throws HiveException, IOException { byte[] keyBytes = currentKey.getBytes(); int keyLength = currentKey.getLength(); keyBinarySortableDeserializeRead.set(keyBytes, 0, keyLength); try { if (!keyBinarySortableDeserializeRead.readNextField()) { - return; + return false; } } catch (Exception e) { throw new HiveException( @@ -92,6 +89,7 @@ public void putRow(BytesWritable currentKey, BytesWritable currentValue) throws keyBinarySortableDeserializeRead, hashTableKeyType); add(key, currentValue); + return true; } protected abstract void assignSlot(int slot, long key, boolean isNewKey, BytesWritable currentValue); @@ -215,10 +213,9 @@ private void expandAndRehash() { largestNumberOfSteps = newLargestNumberOfSteps; resizeThreshold = (int)(logicalHashBucketCount * loadFactor); metricExpands++; - // LOG.debug("VectorMapJoinFastLongHashTable expandAndRehash new logicalHashBucketCount " + logicalHashBucketCount + " resizeThreshold " + resizeThreshold + " metricExpands " + metricExpands); } - protected long findReadSlot(long key, long hashCode) { + protected int findReadSlot(long key, long hashCode) { int intHashCode = (int) hashCode; int slot = intHashCode & logicalHashBucketMask; @@ -230,20 +227,16 @@ protected long findReadSlot(long key, long hashCode) { long valueRef = slotPairs[pairIndex]; if (valueRef == 0) { // Given that we do not delete, an empty slot means no match. - // LOG.debug("VectorMapJoinFastLongHashTable findReadSlot key " + key + " slot " + slot + " pairIndex " + pairIndex + " empty slot (i = " + i + ")"); return -1; } long tableKey = slotPairs[pairIndex + 1]; if (key == tableKey) { - // LOG.debug("VectorMapJoinFastLongHashTable findReadSlot key " + key + " slot " + slot + " pairIndex " + pairIndex + " found key (i = " + i + ")"); - return slotPairs[pairIndex]; + return pairIndex; } // Some other key (collision) - keep probing. 
probeSlot += (++i); if (i > largestNumberOfSteps) { - // LOG.debug("VectorMapJoinFastLongHashTable findReadSlot returning not found"); // We know we never went that far when we were inserting. - // LOG.debug("VectorMapJoinFastLongHashTable findReadSlot key " + key + " slot " + slot + " pairIndex " + pairIndex + " largestNumberOfSteps " + largestNumberOfSteps + " (i = " + i + ")"); return -1; } slot = (int)(probeSlot & logicalHashBucketMask); @@ -268,10 +261,13 @@ private void allocateBucketArray() { } public VectorMapJoinFastLongHashTable( - boolean minMaxEnabled, boolean isOuterJoin, HashTableKeyType hashTableKeyType, + boolean isFullOuter, + boolean minMaxEnabled, + HashTableKeyType hashTableKeyType, int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { - super(initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); - this.isOuterJoin = isOuterJoin; + super( + isFullOuter, + initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); this.hashTableKeyType = hashTableKeyType; PrimitiveTypeInfo[] primitiveTypeInfos = { hashTableKeyType.getPrimitiveTypeInfo() }; keyBinarySortableDeserializeRead = diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastMultiKeyHashMap.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastMultiKeyHashMap.java index 2798010cfe..4ab1601bd2 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastMultiKeyHashMap.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastMultiKeyHashMap.java @@ -49,9 +49,11 @@ public void testPutRow(byte[] currentKey, byte[] currentValue) throws HiveExcept } public VectorMapJoinFastMultiKeyHashMap( - boolean isOuterJoin, - int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { - super(initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); + boolean isFullOuter, + int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { + super( + isFullOuter, + initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastMultiKeyHashMultiSet.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastMultiKeyHashMultiSet.java index 0560281543..960115f95f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastMultiKeyHashMultiSet.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastMultiKeyHashMultiSet.java @@ -47,9 +47,11 @@ public void testPutRow(byte[] currentKey) throws HiveException, IOException { } public VectorMapJoinFastMultiKeyHashMultiSet( - boolean isOuterJoin, - int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { - super(initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); + boolean isFullOuter, + int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { + super( + isFullOuter, + initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastMultiKeyHashSet.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastMultiKeyHashSet.java index 900ca550b7..9c1183b942 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastMultiKeyHashSet.java 
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastMultiKeyHashSet.java @@ -47,9 +47,11 @@ public void testPutRow(byte[] currentKey) throws HiveException, IOException { } public VectorMapJoinFastMultiKeyHashSet( - boolean isOuterJoin, - int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { - super(initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); + boolean isFullOuter, + int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { + super( + isFullOuter, + initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastNonMatchedIterator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastNonMatchedIterator.java new file mode 100644 index 0000000000..8cdad409b2 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastNonMatchedIterator.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast; + +import org.apache.hadoop.hive.ql.exec.persistence.MatchTracker; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinNonMatchedIterator; + +/** + * The abstract class for vectorized non-match Small Table key iteration. 
+ */ +public abstract class VectorMapJoinFastNonMatchedIterator + extends VectorMapJoinNonMatchedIterator { + + protected int nonMatchedLogicalSlotNum; + + public VectorMapJoinFastNonMatchedIterator(MatchTracker matchTracker) { + super(matchTracker); + } + + @Override + public void init() { + nonMatchedLogicalSlotNum = -1; + } +} \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastStringCommon.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastStringCommon.java index 777eb456e7..1b108a8c14 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastStringCommon.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastStringCommon.java @@ -35,11 +35,9 @@ public static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinFastStringCommon.class); - private boolean isOuterJoin; - private BinarySortableDeserializeRead keyBinarySortableDeserializeRead; - public void adaptPutRow(VectorMapJoinFastBytesHashTable hashTable, + public boolean adaptPutRow(VectorMapJoinFastBytesHashTable hashTable, BytesWritable currentKey, BytesWritable currentValue) throws HiveException, IOException { byte[] keyBytes = currentKey.getBytes(); @@ -47,7 +45,7 @@ public void adaptPutRow(VectorMapJoinFastBytesHashTable hashTable, keyBinarySortableDeserializeRead.set(keyBytes, 0, keyLength); try { if (!keyBinarySortableDeserializeRead.readNextField()) { - return; + return false; } } catch (Exception e) { throw new HiveException( @@ -61,14 +59,14 @@ public void adaptPutRow(VectorMapJoinFastBytesHashTable hashTable, keyBinarySortableDeserializeRead.currentBytesStart, keyBinarySortableDeserializeRead.currentBytesLength, currentValue); + return true; } - public VectorMapJoinFastStringCommon(boolean isOuterJoin) { - this.isOuterJoin = isOuterJoin; + public VectorMapJoinFastStringCommon() { PrimitiveTypeInfo[] primitiveTypeInfos = { TypeInfoFactory.stringTypeInfo }; keyBinarySortableDeserializeRead = new BinarySortableDeserializeRead( primitiveTypeInfos, /* useExternalBuffer */ false); } -} \ No newline at end of file +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastStringHashMap.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastStringHashMap.java index fc4edda6be..446feb2526 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastStringHashMap.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastStringHashMap.java @@ -34,14 +34,22 @@ @Override public void putRow(BytesWritable currentKey, BytesWritable currentValue) throws HiveException, IOException { - stringCommon.adaptPutRow(this, currentKey, currentValue); + if (!stringCommon.adaptPutRow(this, currentKey, currentValue)) { + + // Ignore NULL keys, except for FULL OUTER. 
+ if (isFullOuter) { + addFullOuterNullKeyValue(currentValue); + } + } } public VectorMapJoinFastStringHashMap( - boolean isOuterJoin, + boolean isFullOuter, int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { - super(initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); - stringCommon = new VectorMapJoinFastStringCommon(isOuterJoin); + super( + isFullOuter, + initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); + stringCommon = new VectorMapJoinFastStringCommon(); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastStringHashMultiSet.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastStringHashMultiSet.java index 3dbdfa7f06..c28ef9be2b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastStringHashMultiSet.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastStringHashMultiSet.java @@ -30,18 +30,29 @@ */ public class VectorMapJoinFastStringHashMultiSet extends VectorMapJoinFastBytesHashMultiSet { - private VectorMapJoinFastStringCommon stringCommon; + private final VectorMapJoinFastStringCommon stringCommon; + + private long fullOuterNullKeyValueCount; @Override public void putRow(BytesWritable currentKey, BytesWritable currentValue) throws HiveException, IOException { - stringCommon.adaptPutRow(this, currentKey, currentValue); + if (!stringCommon.adaptPutRow(this, currentKey, currentValue)) { + + // Ignore NULL keys, except for FULL OUTER. + if (isFullOuter) { + fullOuterNullKeyValueCount++; + } + } } public VectorMapJoinFastStringHashMultiSet( - boolean isOuterJoin, + boolean isFullOuter, int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { - super(initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); - stringCommon = new VectorMapJoinFastStringCommon(isOuterJoin); + super( + isFullOuter, + initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); + fullOuterNullKeyValueCount = 0; + stringCommon = new VectorMapJoinFastStringCommon(); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastStringHashSet.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastStringHashSet.java index 84f8439751..17bd5fda93 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastStringHashSet.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastStringHashSet.java @@ -33,15 +33,20 @@ private VectorMapJoinFastStringCommon stringCommon; @Override - public void putRow(BytesWritable currentKey, BytesWritable currentValue) throws HiveException, IOException { + public void putRow(BytesWritable currentKey, BytesWritable currentValue) + throws HiveException, IOException { + + // Ignore NULL keys (HashSet not used for FULL OUTER). 
stringCommon.adaptPutRow(this, currentKey, currentValue); } public VectorMapJoinFastStringHashSet( - boolean isOuterJoin, + boolean isFullOuter, int initialCapacity, float loadFactor, int writeBuffersSize, long estimatedKeyCount) { - super(initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); - stringCommon = new VectorMapJoinFastStringCommon(isOuterJoin); + super( + isFullOuter, + initialCapacity, loadFactor, writeBuffersSize, estimatedKeyCount); + stringCommon = new VectorMapJoinFastStringCommon(); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastTableContainer.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastTableContainer.java index 24dfa5d9a7..e8dcbf18cb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastTableContainer.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VectorMapJoinFastTableContainer.java @@ -27,6 +27,8 @@ import org.apache.hadoop.hive.ql.exec.persistence.HashMapWrapper; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinKey; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectSerDeContext; +import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer.NonMatchedSmallTableIterator; +import org.apache.hadoop.hive.ql.exec.persistence.MatchTracker; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashTable; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinTableContainer; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -73,11 +75,6 @@ public VectorMapJoinFastTableContainer(MapJoinDesc desc, Configuration hconf, this.estimatedKeyCount = estimatedKeyCount; - // LOG.info("VectorMapJoinFastTableContainer load keyCountAdj " + keyCountAdj); - // LOG.info("VectorMapJoinFastTableContainer load threshold " + threshold); - // LOG.info("VectorMapJoinFastTableContainer load loadFactor " + loadFactor); - // LOG.info("VectorMapJoinFastTableContainer load wbSize " + wbSize); - int newThreshold = HashMapWrapper.calculateTableSize( keyCountAdj, threshold, loadFactor, estimatedKeyCount); @@ -93,13 +90,11 @@ public VectorMapJoinHashTable vectorMapJoinHashTable() { private VectorMapJoinFastHashTable createHashTable(int newThreshold) { - boolean isOuterJoin = !desc.isNoOuterJoin(); - - // UNDONE VectorMapJoinDesc vectorDesc = (VectorMapJoinDesc) desc.getVectorDesc(); HashTableImplementationType hashTableImplementationType = vectorDesc.getHashTableImplementationType(); HashTableKind hashTableKind = vectorDesc.getHashTableKind(); HashTableKeyType hashTableKeyType = vectorDesc.getHashTableKeyType(); + boolean isFullOuter = vectorDesc.getIsFullOuter(); boolean minMaxEnabled = vectorDesc.getMinMaxEnabled(); int writeBufferSize = HiveConf.getIntVar(hconf, HiveConf.ConfVars.HIVEHASHTABLEWBSIZE); @@ -115,18 +110,24 @@ private VectorMapJoinFastHashTable createHashTable(int newThreshold) { switch (hashTableKind) { case HASH_MAP: hashTable = new VectorMapJoinFastLongHashMap( - minMaxEnabled, isOuterJoin, hashTableKeyType, - newThreshold, loadFactor, writeBufferSize, estimatedKeyCount); + isFullOuter, + minMaxEnabled, + hashTableKeyType, + newThreshold, loadFactor, writeBufferSize, estimatedKeyCount); break; case HASH_MULTISET: hashTable = new VectorMapJoinFastLongHashMultiSet( - minMaxEnabled, isOuterJoin, hashTableKeyType, - newThreshold, loadFactor, writeBufferSize, estimatedKeyCount); + isFullOuter, + minMaxEnabled, + hashTableKeyType, + 
newThreshold, loadFactor, writeBufferSize, estimatedKeyCount); break; case HASH_SET: hashTable = new VectorMapJoinFastLongHashSet( - minMaxEnabled, isOuterJoin, hashTableKeyType, - newThreshold, loadFactor, writeBufferSize, estimatedKeyCount); + isFullOuter, + minMaxEnabled, + hashTableKeyType, + newThreshold, loadFactor, writeBufferSize, estimatedKeyCount); break; } break; @@ -135,18 +136,18 @@ private VectorMapJoinFastHashTable createHashTable(int newThreshold) { switch (hashTableKind) { case HASH_MAP: hashTable = new VectorMapJoinFastStringHashMap( - isOuterJoin, - newThreshold, loadFactor, writeBufferSize, estimatedKeyCount); + isFullOuter, + newThreshold, loadFactor, writeBufferSize, estimatedKeyCount); break; case HASH_MULTISET: hashTable = new VectorMapJoinFastStringHashMultiSet( - isOuterJoin, - newThreshold, loadFactor, writeBufferSize, estimatedKeyCount); + isFullOuter, + newThreshold, loadFactor, writeBufferSize, estimatedKeyCount); break; case HASH_SET: hashTable = new VectorMapJoinFastStringHashSet( - isOuterJoin, - newThreshold, loadFactor, writeBufferSize, estimatedKeyCount); + isFullOuter, + newThreshold, loadFactor, writeBufferSize, estimatedKeyCount); break; } break; @@ -155,18 +156,18 @@ private VectorMapJoinFastHashTable createHashTable(int newThreshold) { switch (hashTableKind) { case HASH_MAP: hashTable = new VectorMapJoinFastMultiKeyHashMap( - isOuterJoin, + isFullOuter, newThreshold, loadFactor, writeBufferSize, estimatedKeyCount); break; case HASH_MULTISET: hashTable = new VectorMapJoinFastMultiKeyHashMultiSet( - isOuterJoin, - newThreshold, loadFactor, writeBufferSize, estimatedKeyCount); + isFullOuter, + newThreshold, loadFactor, writeBufferSize, estimatedKeyCount); break; case HASH_SET: hashTable = new VectorMapJoinFastMultiKeyHashSet( - isOuterJoin, - newThreshold, loadFactor, writeBufferSize, estimatedKeyCount); + isFullOuter, + newThreshold, loadFactor, writeBufferSize, estimatedKeyCount); break; } break; @@ -194,6 +195,12 @@ public ReusableGetAdaptor createGetter(MapJoinKey keyTypeFromLoader) { throw new RuntimeException("Not applicable"); } + @Override + public NonMatchedSmallTableIterator createNonMatchedSmallTableIterator( + MatchTracker matchTracker) { + throw new RuntimeException("Not applicable"); + } + @Override public void clear() { // Do nothing @@ -236,11 +243,4 @@ public void setSerde(MapJoinObjectSerDeContext keyCtx, MapJoinObjectSerDeContext // Do nothing in this case. } - - /* - @Override - public com.esotericsoftware.kryo.io.Output getHybridBigTableSpillOutput(int partitionId) { - throw new RuntimeException("Not applicable"); - } - */ } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinBytesHashMap.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinBytesHashMap.java index 2408484c19..b50a098fa8 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinBytesHashMap.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinBytesHashMap.java @@ -21,6 +21,7 @@ import java.io.IOException; import org.apache.hadoop.hive.ql.exec.JoinUtil; +import org.apache.hadoop.hive.ql.exec.persistence.MatchTracker; /* * The interface for a single byte array key hash map lookup method. @@ -41,6 +42,9 @@ * The object to receive small table value(s) information on a MATCH. * Or, for SPILL, it has information on where to spill the big table row. 
* + * NOTE: Since the hash table can be shared, the hashMapResult serves as the non-shared + * private object for our accessing the hash table lookup values, etc. + * * @return * Whether the lookup was a match, no match, or spill (the partition with the key * is currently spilled). @@ -48,4 +52,16 @@ JoinUtil.JoinResult lookup(byte[] keyBytes, int keyStart, int keyLength, VectorMapJoinHashMapResult hashMapResult) throws IOException; + /* + * A version of lookup with match tracking. + * ... + * * @param matchTracker + * Optional key match tracking. + * + * NOTE: Since the hash table can be shared, the matchTracker serves as the non-shared + * private object for tracking our key matches in the hash table. + * ... + */ + JoinUtil.JoinResult lookup(byte[] keyBytes, int keyStart, int keyLength, + VectorMapJoinHashMapResult hashMapResult, MatchTracker matchTracker) throws IOException; } \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinHashMap.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinHashMap.java index 2d2490c03a..5762cffc2f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinHashMap.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinHashMap.java @@ -30,5 +30,4 @@ * access spill information when the partition with the key is currently spilled. */ VectorMapJoinHashMapResult createHashMapResult(); - } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinHashTable.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinHashTable.java index e49da0474e..ce5c597316 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinHashTable.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinHashTable.java @@ -21,6 +21,7 @@ import java.io.IOException; import org.apache.hadoop.hive.common.MemoryEstimate; +import org.apache.hadoop.hive.ql.exec.persistence.MatchTracker; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.io.BytesWritable; @@ -31,7 +32,6 @@ */ public interface VectorMapJoinHashTable extends MemoryEstimate { - /* * @param currentKey * The current key. @@ -45,4 +45,10 @@ void putRow(BytesWritable currentKey, BytesWritable currentValue) * Get hash table size */ int size(); + + MatchTracker createMatchTracker(); + + VectorMapJoinNonMatchedIterator createNonMatchedIterator(MatchTracker matchTracker); + + int spillPartitionId(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinLongHashMap.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinLongHashMap.java index ba68d35a4d..593f784380 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinLongHashMap.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinLongHashMap.java @@ -21,6 +21,7 @@ import java.io.IOException; import org.apache.hadoop.hive.ql.exec.JoinUtil; +import org.apache.hadoop.hive.ql.exec.persistence.MatchTracker; /* * The interface for a single long key hash map lookup method. @@ -43,4 +44,16 @@ */ JoinUtil.JoinResult lookup(long key, VectorMapJoinHashMapResult hashMapResult) throws IOException; + /* + * A version of lookup with match tracking. + * ... 
+ * @param matchTracker + * Optional key match tracking. + * + * NOTE: Since the hash table can be shared, the matchTracker serves as the non-shared + * private object for tracking our key matches in the hash table. + * ... + */ + JoinUtil.JoinResult lookup(long key, VectorMapJoinHashMapResult hashMapResult, + MatchTracker matchTracker) throws IOException; } \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinLongHashTable.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinLongHashTable.java index d0f9dcb51e..74cfb9ceea 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinLongHashTable.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinLongHashTable.java @@ -27,5 +27,4 @@ boolean useMinMax(); long min(); long max(); - } \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinNonMatchedIterator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinNonMatchedIterator.java new file mode 100644 index 0000000000..e87345a9a0 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/hashtable/VectorMapJoinNonMatchedIterator.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable; + +import org.apache.hadoop.hive.ql.exec.persistence.MatchTracker; +import org.apache.hadoop.hive.ql.metadata.HiveException; + +/** + * The abstract class for vectorized non-match Small Table key iteration. 
+ */ +public abstract class VectorMapJoinNonMatchedIterator { + + protected final MatchTracker matchTracker; + + protected int nonMatchedLogicalSlotNum; + + public VectorMapJoinNonMatchedIterator(MatchTracker matchTracker) { + this.matchTracker = matchTracker; + } + + public void init() { + nonMatchedLogicalSlotNum = -1; + } + + public boolean findNextNonMatched() { + throw new RuntimeException("Not implemented"); + } + + public boolean readNonMatchedLongKey() throws HiveException { + throw new RuntimeException("Not implemented"); + } + + public long getNonMatchedLongKey() throws HiveException { + throw new RuntimeException("Not implemented"); + } + + public boolean readNonMatchedBytesKey() throws HiveException { + throw new RuntimeException("Not implemented"); + } + + public byte[] getNonMatchedBytes() { + throw new RuntimeException("Not implemented"); + } + + public int getNonMatchedBytesOffset() { + throw new RuntimeException("Not implemented"); + } + + public int getNonMatchedBytesLength() { + throw new RuntimeException("Not implemented"); + } + + public VectorMapJoinHashMapResult getNonMatchedHashMapResult() { + throw new RuntimeException("Not implemented"); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedCreateHashTable.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedCreateHashTable.java index f95cd76733..21c355cb42 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedCreateHashTable.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedCreateHashTable.java @@ -116,16 +116,4 @@ public static VectorMapJoinOptimizedHashTable createHashTable(MapJoinDesc desc, } return hashTable; } - - /* - @Override - public com.esotericsoftware.kryo.io.Output getHybridBigTableSpillOutput(int partitionId) { - - HybridHashTableContainer ht = (HybridHashTableContainer) mapJoinTableContainer; - - HashPartition hp = ht.getHashPartitions()[partitionId]; - - return hp.getMatchfileOutput(); - } - */ } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedHashMap.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedHashMap.java index 9242702a9d..78a3160aff 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedHashMap.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedHashMap.java @@ -23,10 +23,13 @@ import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.persistence.BytesBytesMultiHashMap; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer; +import org.apache.hadoop.hive.ql.exec.persistence.MatchTracker; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer.ReusableGetAdaptor; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashMap; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMapResult; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashTableResult; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinNonMatchedIterator; +import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.WriteBuffers.ByteSegmentRef; public class VectorMapJoinOptimizedHashMap @@ -40,13 +43,18 @@ public VectorMapJoinHashMapResult 
createHashMapResult() { public static class HashMapResult extends VectorMapJoinHashMapResult { - private BytesBytesMultiHashMap.Result bytesBytesMultiHashMapResult; + private final BytesBytesMultiHashMap.Result bytesBytesMultiHashMapResult; public HashMapResult() { super(); bytesBytesMultiHashMapResult = new BytesBytesMultiHashMap.Result(); } + public HashMapResult(BytesBytesMultiHashMap.Result bytesBytesMultiHashMapResult) { + super(); + this.bytesBytesMultiHashMapResult = bytesBytesMultiHashMapResult; + } + public BytesBytesMultiHashMap.Result bytesBytesMultiHashMapResult() { return bytesBytesMultiHashMapResult; } @@ -106,7 +114,59 @@ public String toString() { public String getDetailedHashMapResultPositionString() { return "(Not supported yet)"; } - } + } + + protected static class NonMatchedBytesHashMapIterator + extends VectorMapJoinOptimizedNonMatchedIterator { + + private VectorMapJoinOptimizedHashMap hashMap; + + protected ByteSegmentRef keyRef; + + public NonMatchedBytesHashMapIterator(MatchTracker matchTracker, + VectorMapJoinOptimizedHashMap hashMap) { + super(matchTracker); + this.hashMap = hashMap; + } + + @Override + public void init() { + super.init(); + nonMatchedIterator = + ((MapJoinTableContainer) hashMap.originalTableContainer). + createNonMatchedSmallTableIterator(matchTracker); + } + + public void doReadNonMatchedBytesKey() throws HiveException { + keyRef = nonMatchedIterator.getCurrentKeyAsRef(); + } + + @Override + public boolean readNonMatchedBytesKey() throws HiveException { + doReadNonMatchedBytesKey(); + return true; // We have not interpreted the bytes, so return true. + } + + @Override + public byte[] getNonMatchedBytes() { + return keyRef.getBytes(); + } + + @Override + public int getNonMatchedBytesOffset() { + return (int) keyRef.getOffset(); + } + + @Override + public int getNonMatchedBytesLength() { + return keyRef.getLength(); + } + } + + @Override + public VectorMapJoinNonMatchedIterator createNonMatchedIterator(MatchTracker matchTracker) { + return new NonMatchedBytesHashMapIterator(matchTracker, this); + } @Override public JoinUtil.JoinResult lookup(byte[] keyBytes, int keyOffset, int keyLength, @@ -117,7 +177,21 @@ public String getDetailedHashMapResultPositionString() { JoinUtil.JoinResult joinResult = doLookup(keyBytes, keyOffset, keyLength, implementationHashMapResult.bytesBytesMultiHashMapResult(), - (VectorMapJoinHashTableResult) hashMapResult); + (VectorMapJoinHashTableResult) hashMapResult, null); + + return joinResult; + } + + @Override + public JoinUtil.JoinResult lookup(byte[] keyBytes, int keyOffset, int keyLength, + VectorMapJoinHashMapResult hashMapResult, MatchTracker matchTracker) throws IOException { + + HashMapResult implementationHashMapResult = (HashMapResult) hashMapResult; + + JoinUtil.JoinResult joinResult = + doLookup(keyBytes, keyOffset, keyLength, + implementationHashMapResult.bytesBytesMultiHashMapResult(), + (VectorMapJoinHashTableResult) hashMapResult, matchTracker); return joinResult; } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedHashMultiSet.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedHashMultiSet.java index 9921a88997..cfe128c855 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedHashMultiSet.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedHashMultiSet.java @@ -91,7 +91,7 @@ public void forget() { 
JoinUtil.JoinResult joinResult = doLookup(keyBytes, keyOffset, keyLength, implementationHashMultiSetResult.bytesBytesMultiHashMapResult(), - (VectorMapJoinHashTableResult) hashMultiSetResult); + (VectorMapJoinHashTableResult) hashMultiSetResult, null); return joinResult; } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedHashSet.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedHashSet.java index 122f881e79..8f53ada3bb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedHashSet.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedHashSet.java @@ -66,7 +66,7 @@ public void forget() { JoinUtil.JoinResult joinResult = doLookup(keyBytes, keyOffset, keyLength, implementationHashSetResult.bytesBytesMultiHashMapResult(), - (VectorMapJoinHashTableResult) hashSetResult); + (VectorMapJoinHashTableResult) hashSetResult, null); return joinResult; } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedHashTable.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedHashTable.java index 74887f7ecd..45faa9713a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedHashTable.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedHashTable.java @@ -26,21 +26,25 @@ import org.apache.hadoop.hive.ql.exec.JoinUtil; import org.apache.hadoop.hive.ql.exec.persistence.BytesBytesMultiHashMap; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer; +import org.apache.hadoop.hive.ql.exec.persistence.MatchTracker; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainerDirectAccess; import org.apache.hadoop.hive.ql.exec.persistence.ReusableGetAdaptorDirectAccess; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer.ReusableGetAdaptor; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashTable; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashTable; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashTableResult; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinNonMatchedIterator; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Writable; -/* +/** * Root interface for a vector map join hash table (which could be a hash map, hash multi-set, or * hash set). 
*/ -public abstract class VectorMapJoinOptimizedHashTable implements VectorMapJoinHashTable { +public abstract class VectorMapJoinOptimizedHashTable + implements VectorMapJoinHashTable, VectorMapJoinBytesHashTable { private static final Logger LOG = LoggerFactory.getLogger(VectorMapJoinOptimizedMultiKeyHashMap.class.getName()); @@ -54,6 +58,16 @@ int length; } + @Override + public VectorMapJoinNonMatchedIterator createNonMatchedIterator(MatchTracker matchTracker) { + throw new RuntimeException("Not implemented"); + } + + @Override + public int spillPartitionId() { + return adapatorDirectAccess.directSpillPartitionId(); + } + @Override public void putRow(BytesWritable currentKey, BytesWritable currentValue) throws SerDeException, HiveException, IOException { @@ -69,13 +83,13 @@ protected void putRowInternal(BytesWritable key, BytesWritable value) public JoinUtil.JoinResult doLookup(byte[] keyBytes, int keyOffset, int keyLength, BytesBytesMultiHashMap.Result bytesBytesMultiHashMapResult, - VectorMapJoinHashTableResult hashTableResult) { + VectorMapJoinHashTableResult hashTableResult, MatchTracker matchTracker) { hashTableResult.forget(); JoinUtil.JoinResult joinResult = adapatorDirectAccess.setDirect(keyBytes, keyOffset, keyLength, - bytesBytesMultiHashMapResult); + bytesBytesMultiHashMapResult, matchTracker); if (joinResult == JoinUtil.JoinResult.SPILL) { hashTableResult.setSpillPartitionId(adapatorDirectAccess.directSpillPartitionId()); } @@ -105,4 +119,9 @@ public long getEstimatedMemorySize() { size += (2 * JavaDataModel.get().object()); return size; } + + @Override + public MatchTracker createMatchTracker() { + return adapatorDirectAccess.createMatchTracker(); + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedLongCommon.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedLongCommon.java index 9c45ed9247..de1ee15c3b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedLongCommon.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedLongCommon.java @@ -41,8 +41,6 @@ private HashTableKeyType hashTableKeyType; - // private BinarySortableDeserializeRead keyBinarySortableDeserializeRead; - private BinarySortableSerializeWrite keyBinarySortableSerializeWrite; private transient Output output; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedLongHashMap.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedLongHashMap.java index b21f0b3f66..42573f0898 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedLongHashMap.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedLongHashMap.java @@ -23,9 +23,16 @@ import org.apache.hadoop.hive.ql.exec.JoinUtil.JoinResult; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer.ReusableGetAdaptor; +import org.apache.hadoop.hive.ql.exec.persistence.MatchTracker; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMapResult; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinLongHashMap; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinNonMatchedIterator; +import org.apache.hadoop.hive.ql.metadata.HiveException; import 
org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableKeyType; +import org.apache.hadoop.hive.serde2.WriteBuffers.ByteSegmentRef; +import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableDeserializeRead; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; /* * An single long value hash map based on the BytesBytesMultiHashMap. @@ -37,8 +44,104 @@ extends VectorMapJoinOptimizedHashMap implements VectorMapJoinLongHashMap { + private HashTableKeyType hashTableKeyType; + private VectorMapJoinOptimizedLongCommon longCommon; + private static class NonMatchedLongHashMapIterator + extends VectorMapJoinOptimizedNonMatchedIterator { + + private VectorMapJoinOptimizedLongHashMap hashMap; + + // Extract long with non-shared deserializer object. + private BinarySortableDeserializeRead keyBinarySortableDeserializeRead; + + private long longValue; + + NonMatchedLongHashMapIterator(MatchTracker matchTracker, + VectorMapJoinOptimizedLongHashMap hashMap) { + super(matchTracker); + this.hashMap = hashMap; + } + + @Override + public void init() { + super.init(); + nonMatchedIterator = + ((MapJoinTableContainer) hashMap.originalTableContainer). + createNonMatchedSmallTableIterator(matchTracker); + + TypeInfo integerTypeInfo; + switch (hashMap.hashTableKeyType) { + case BOOLEAN: + integerTypeInfo = TypeInfoFactory.booleanTypeInfo; + break; + case BYTE: + integerTypeInfo = TypeInfoFactory.byteTypeInfo; + break; + case SHORT: + integerTypeInfo = TypeInfoFactory.shortTypeInfo; + break; + case INT: + integerTypeInfo = TypeInfoFactory.intTypeInfo; + break; + case LONG: + integerTypeInfo = TypeInfoFactory.longTypeInfo; + break; + default: + throw new RuntimeException("Unexpected key type " + hashMap.hashTableKeyType); + } + keyBinarySortableDeserializeRead = + new BinarySortableDeserializeRead( + new TypeInfo[] {integerTypeInfo}, false); + } + + private boolean readNonMatchedLongKey(ByteSegmentRef keyRef) throws HiveException { + + try { + byte[] keyBytes = keyRef.getBytes(); + int keyOffset = (int) keyRef.getOffset(); + int keyLength = keyRef.getLength(); + keyBinarySortableDeserializeRead.set(keyBytes, keyOffset, keyLength); + if (!keyBinarySortableDeserializeRead.readNextField()) { + return false; + } + switch (hashMap.hashTableKeyType) { + case BOOLEAN: + longValue = keyBinarySortableDeserializeRead.currentBoolean ? 
1 : 0; + break; + case BYTE: + longValue = keyBinarySortableDeserializeRead.currentByte; + break; + case SHORT: + longValue = keyBinarySortableDeserializeRead.currentShort; + break; + case INT: + longValue = keyBinarySortableDeserializeRead.currentInt; + break; + case LONG: + longValue = keyBinarySortableDeserializeRead.currentLong; + break; + default: + throw new RuntimeException("Unexpected key type " + hashMap.hashTableKeyType); + } + } catch (IOException e) { + throw new HiveException(e); + } + return true; + } + + @Override + public boolean readNonMatchedLongKey() throws HiveException { + return readNonMatchedLongKey(nonMatchedIterator.getCurrentKeyAsRef()); + } + + @Override + public long getNonMatchedLongKey() throws HiveException { + return longValue; + } + } + @Override public boolean useMinMax() { return longCommon.useMinMax(); @@ -54,14 +157,10 @@ public long max() { return longCommon.max(); } - /* @Override - public void putRow(BytesWritable currentKey, BytesWritable currentValue) - throws SerDeException, HiveException, IOException { - - longCommon.adaptPutRow((VectorMapJoinOptimizedHashTable) this, currentKey, currentValue); + public VectorMapJoinNonMatchedIterator createNonMatchedIterator(MatchTracker matchTracker) { + return new NonMatchedLongHashMapIterator(matchTracker, this); } - */ @Override public JoinResult lookup(long key, @@ -73,10 +172,21 @@ public JoinResult lookup(long key, hashMapResult); } + @Override + public JoinResult lookup(long key, + VectorMapJoinHashMapResult hashMapResult, MatchTracker matchTracker) throws IOException { + + SerializedBytes serializedBytes = longCommon.serialize(key); + + return super.lookup(serializedBytes.bytes, serializedBytes.offset, serializedBytes.length, + hashMapResult, matchTracker); + } + public VectorMapJoinOptimizedLongHashMap( boolean minMaxEnabled, boolean isOuterJoin, HashTableKeyType hashTableKeyType, MapJoinTableContainer originalTableContainer, ReusableGetAdaptor hashMapRowGetter) { super(originalTableContainer, hashMapRowGetter); + this.hashTableKeyType = hashTableKeyType; longCommon = new VectorMapJoinOptimizedLongCommon(minMaxEnabled, isOuterJoin, hashTableKeyType); } } \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedMultiKeyHashMap.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedMultiKeyHashMap.java index 3e8e6fb8c3..e07bbaae82 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedMultiKeyHashMap.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedMultiKeyHashMap.java @@ -27,8 +27,6 @@ public class VectorMapJoinOptimizedMultiKeyHashMap extends VectorMapJoinOptimizedHashMap { - // UNDONE: How to look for all NULLs in a multi-key????? Let nulls through for now. 
- public VectorMapJoinOptimizedMultiKeyHashMap(boolean isOuterJoin, MapJoinTableContainer originalTableContainer, ReusableGetAdaptor hashMapRowGetter) { super(originalTableContainer, hashMapRowGetter); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedNonMatchedIterator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedNonMatchedIterator.java new file mode 100644 index 0000000000..89d707cec7 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedNonMatchedIterator.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.mapjoin.optimized; + +import org.apache.hadoop.hive.ql.exec.JoinUtil.JoinResult; +import org.apache.hadoop.hive.ql.exec.persistence.MatchTracker; +import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer.NonMatchedSmallTableIterator; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMapResult; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinNonMatchedIterator; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.optimized.VectorMapJoinOptimizedHashMap.HashMapResult; + +/** + * The abstract class for vectorized non-match Small Table key iteration. + */ +public abstract class VectorMapJoinOptimizedNonMatchedIterator + extends VectorMapJoinNonMatchedIterator { + + protected NonMatchedSmallTableIterator nonMatchedIterator; + + protected HashMapResult nonMatchedHashMapResult; + + public VectorMapJoinOptimizedNonMatchedIterator(MatchTracker matchTracker) { + super(matchTracker); + } + + @Override + public boolean findNextNonMatched() { + return nonMatchedIterator.isNext(); + } + + @Override + public VectorMapJoinHashMapResult getNonMatchedHashMapResult() { + if (nonMatchedHashMapResult == null) { + nonMatchedHashMapResult = new HashMapResult(nonMatchedIterator.getHashMapResult()); + } + nonMatchedHashMapResult.setJoinResult(JoinResult.MATCH); + return nonMatchedHashMapResult; + } +} \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedStringCommon.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedStringCommon.java index a8ccfa4a77..da0e8365b1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedStringCommon.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedStringCommon.java @@ -26,19 +26,9 @@ /* * An single byte array value hash map based on the BytesBytesMultiHashMap. 
- * - * Since BytesBytesMultiHashMap does not interpret the key as BinarySortable we optimize - * this case and just reference the byte array key directly for the lookup instead of serializing - * the byte array into BinarySortable. We rely on it just doing byte array equality comparisons. */ public class VectorMapJoinOptimizedStringCommon { - // private boolean isOuterJoin; - - // private BinarySortableDeserializeRead keyBinarySortableDeserializeRead; - - // private ReadStringResults readStringResults; - private BinarySortableSerializeWrite keyBinarySortableSerializeWrite; private transient Output output; @@ -55,18 +45,13 @@ public SerializedBytes serialize(byte[] keyBytes, int keyStart, int keyLength) t serializedBytes.length = output.getLength(); return serializedBytes; - } public VectorMapJoinOptimizedStringCommon(boolean isOuterJoin) { - // this.isOuterJoin = isOuterJoin; - // PrimitiveTypeInfo[] primitiveTypeInfos = { TypeInfoFactory.stringTypeInfo }; - // keyBinarySortableDeserializeRead = new BinarySortableDeserializeRead(primitiveTypeInfos); - // readStringResults = keyBinarySortableDeserializeRead.createReadStringResults(); - // bytesWritable = new BytesWritable(); + keyBinarySortableSerializeWrite = new BinarySortableSerializeWrite(1); output = new Output(); keyBinarySortableSerializeWrite.set(output); serializedBytes = new SerializedBytes(); } -} \ No newline at end of file +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedStringHashMap.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedStringHashMap.java index f2074ec2ca..6c4d8a81d1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedStringHashMap.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/optimized/VectorMapJoinOptimizedStringHashMap.java @@ -22,12 +22,18 @@ import org.apache.hadoop.hive.ql.exec.JoinUtil.JoinResult; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer; +import org.apache.hadoop.hive.ql.exec.persistence.MatchTracker; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer.ReusableGetAdaptor; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinBytesHashMap; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMapResult; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinNonMatchedIterator; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableDeserializeRead; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; /* - * An multi-key hash map based on the BytesBytesMultiHashMap. + * A string hash map based on the BytesBytesMultiHashMap.
*/ public class VectorMapJoinOptimizedStringHashMap extends VectorMapJoinOptimizedHashMap @@ -35,14 +41,59 @@ private VectorMapJoinOptimizedStringCommon stringCommon; - /* - @Override - public void putRow(BytesWritable currentKey, BytesWritable currentValue) - throws SerDeException, HiveException, IOException { + private static class NonMatchedStringHashMapIterator extends NonMatchedBytesHashMapIterator { + + private BinarySortableDeserializeRead keyBinarySortableDeserializeRead; + + NonMatchedStringHashMapIterator(MatchTracker matchTracker, + VectorMapJoinOptimizedStringHashMap hashMap) { + super(matchTracker, hashMap); + } + + @Override + public void init() { + super.init(); + + TypeInfo[] typeInfos = new TypeInfo[] {TypeInfoFactory.stringTypeInfo}; + keyBinarySortableDeserializeRead = + new BinarySortableDeserializeRead(typeInfos, /* useExternalBuffer */ false); + } + + @Override + public boolean readNonMatchedBytesKey() throws HiveException { + super.doReadNonMatchedBytesKey(); - stringCommon.adaptPutRow((VectorMapJoinOptimizedHashTable) this, currentKey, currentValue); + byte[] bytes = keyRef.getBytes(); + final int keyOffset = (int) keyRef.getOffset(); + final int keyLength = keyRef.getLength(); + try { + keyBinarySortableDeserializeRead.set(bytes, keyOffset, keyLength); + return keyBinarySortableDeserializeRead.readNextField(); + } catch (IOException e) { + throw new HiveException(e); + } + } + + @Override + public byte[] getNonMatchedBytes() { + return keyBinarySortableDeserializeRead.currentBytes; + } + + @Override + public int getNonMatchedBytesOffset() { + return keyBinarySortableDeserializeRead.currentBytesStart; + } + + @Override + public int getNonMatchedBytesLength() { + return keyBinarySortableDeserializeRead.currentBytesLength; + } + } + + @Override + public VectorMapJoinNonMatchedIterator createNonMatchedIterator(MatchTracker matchTracker) { + return new NonMatchedStringHashMapIterator(matchTracker, this); } - */ @Override public JoinResult lookup(byte[] keyBytes, int keyStart, int keyLength, @@ -55,6 +106,17 @@ public JoinResult lookup(byte[] keyBytes, int keyStart, int keyLength, } + @Override + public JoinResult lookup(byte[] keyBytes, int keyStart, int keyLength, + VectorMapJoinHashMapResult hashMapResult, MatchTracker matchTracker) throws IOException { + + SerializedBytes serializedBytes = stringCommon.serialize(keyBytes, keyStart, keyLength); + + return super.lookup(serializedBytes.bytes, serializedBytes.offset, serializedBytes.length, + hashMapResult, matchTracker); + + } + public VectorMapJoinOptimizedStringHashMap(boolean isOuterJoin, MapJoinTableContainer originalTableContainer, ReusableGetAdaptor hashMapRowGetter) { super(originalTableContainer, hashMapRowGetter); diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractSMBJoinProc.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractSMBJoinProc.java index ab97b3a3fb..e17a17fe04 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractSMBJoinProc.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractSMBJoinProc.java @@ -515,6 +515,9 @@ protected MapJoinOperator convertJoinToBucketMapJoin( joinContext.getBigTablePosition(), false, false); + if (mapJoinOp == null) { + return null; + } // Remove the join operator from the query join context // Data structures coming from QBJoinTree mapJoinOp.getConf().setQBJoinTreeProps(joinOp.getConf()); diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java 
ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java index c733cb18d9..67e8583798 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java @@ -150,8 +150,9 @@ numBuckets = 1; } LOG.info("Estimated number of buckets " + numBuckets); - int mapJoinConversionPos = getMapJoinConversionPos(joinOp, context, numBuckets, false, maxJoinMemory, true); - if (mapJoinConversionPos < 0) { + MapJoinConversion mapJoinConversion = + getMapJoinConversion(joinOp, context, numBuckets, false, maxJoinMemory, true); + if (mapJoinConversion == null) { Object retval = checkAndConvertSMBJoin(context, joinOp, tezBucketJoinProcCtx); if (retval == null) { return retval; @@ -167,10 +168,10 @@ if (context.conf.getBoolVar(HiveConf.ConfVars.HIVE_CONVERT_JOIN_BUCKET_MAPJOIN_TEZ)) { // Check if we are in LLAP, if so it needs to be determined if we should use BMJ or DPHJ if (llapInfo != null) { - if (selectJoinForLlap(context, joinOp, tezBucketJoinProcCtx, llapInfo, mapJoinConversionPos, numBuckets)) { + if (selectJoinForLlap(context, joinOp, tezBucketJoinProcCtx, llapInfo, mapJoinConversion, numBuckets)) { return null; } - } else if (convertJoinBucketMapJoin(joinOp, context, mapJoinConversionPos, tezBucketJoinProcCtx)) { + } else if (convertJoinBucketMapJoin(joinOp, context, mapJoinConversion, tezBucketJoinProcCtx)) { return null; } } @@ -179,16 +180,27 @@ // check if we can convert to map join no bucket scaling. LOG.info("Convert to non-bucketed map join"); if (numBuckets != 1) { - mapJoinConversionPos = getMapJoinConversionPos(joinOp, context, 1, false, maxJoinMemory, true); + mapJoinConversion = getMapJoinConversion(joinOp, context, 1, false, maxJoinMemory, true); } - if (mapJoinConversionPos < 0) { + if (mapJoinConversion == null) { // we are just converting to a common merge join operator. The shuffle // join in map-reduce case. fallbackToReduceSideJoin(joinOp, context); return null; } - MapJoinOperator mapJoinOp = convertJoinMapJoin(joinOp, context, mapJoinConversionPos, true); + // Currently, this is an MJ path and we don't support FULL OUTER MapJoin yet.
+ if (mapJoinConversion.getIsFullOuterJoin() && + !mapJoinConversion.getIsFullOuterEnabledForMapJoin()) { + fallbackToReduceSideJoin(joinOp, context); + return null; + } + + MapJoinOperator mapJoinOp = convertJoinMapJoin(joinOp, context, mapJoinConversion, true); + if (mapJoinOp == null) { + fallbackToReduceSideJoin(joinOp, context); + return null; + } // map join operator by default has no bucket cols and num of reduce sinks // reduced by 1 mapJoinOp.setOpTraits(new OpTraits(null, -1, null, @@ -231,11 +243,11 @@ public static HashMapDataStructureType of(JoinDesc conf) { private boolean selectJoinForLlap(OptimizeTezProcContext context, JoinOperator joinOp, TezBucketJoinProcCtx tezBucketJoinProcCtx, LlapClusterStateForCompile llapInfo, - int mapJoinConversionPos, int numBuckets) throws SemanticException { + MapJoinConversion mapJoinConversion, int numBuckets) throws SemanticException { if (!context.conf.getBoolVar(HiveConf.ConfVars.HIVEDYNAMICPARTITIONHASHJOIN) && numBuckets > 1) { // DPHJ is disabled, only attempt BMJ or mapjoin - return convertJoinBucketMapJoin(joinOp, context, mapJoinConversionPos, tezBucketJoinProcCtx); + return convertJoinBucketMapJoin(joinOp, context, mapJoinConversion, tezBucketJoinProcCtx); } int numExecutorsPerNode = -1; @@ -251,6 +263,7 @@ private boolean selectJoinForLlap(OptimizeTezProcContext context, JoinOperator j LOG.debug("Number of nodes = " + numNodes + ". Number of Executors per node = " + numExecutorsPerNode); // Determine the size of small table inputs + final int mapJoinConversionPos = mapJoinConversion.getBigTablePos(); long totalSize = 0; for (int pos = 0; pos < joinOp.getParentOperators().size(); pos++) { if (pos == mapJoinConversionPos) { @@ -284,7 +297,7 @@ private boolean selectJoinForLlap(OptimizeTezProcContext context, JoinOperator j return convertJoinDynamicPartitionedHashJoin(joinOp, context); } else if (numBuckets > 1) { LOG.info("Bucket Map Join chosen"); - return convertJoinBucketMapJoin(joinOp, context, mapJoinConversionPos, tezBucketJoinProcCtx); + return convertJoinBucketMapJoin(joinOp, context, mapJoinConversion, tezBucketJoinProcCtx); } // fallback to mapjoin no bucket scaling LOG.info("Falling back to mapjoin no bucket scaling"); @@ -559,8 +572,13 @@ private void setAllChildrenTraits(Operator currentOp, Op } private boolean convertJoinBucketMapJoin(JoinOperator joinOp, OptimizeTezProcContext context, - int bigTablePosition, TezBucketJoinProcCtx tezBucketJoinProcCtx) throws SemanticException { + MapJoinConversion mapJoinConversion, TezBucketJoinProcCtx tezBucketJoinProcCtx) throws SemanticException { + if (mapJoinConversion.getIsFullOuterJoin() && + !mapJoinConversion.getIsFullOuterEnabledForMapJoin()) { + return false; + } + final int bigTablePosition = mapJoinConversion.getBigTablePos(); if (!checkConvertJoinBucketMapJoin(joinOp, bigTablePosition, tezBucketJoinProcCtx)) { LOG.info("Check conversion to bucket map join failed."); return false; @@ -592,7 +610,7 @@ private boolean convertJoinBucketMapJoin(JoinOperator joinOp, OptimizeTezProcCon } } - MapJoinOperator mapJoinOp = convertJoinMapJoin(joinOp, context, bigTablePosition, true); + MapJoinOperator mapJoinOp = convertJoinMapJoin(joinOp, context, mapJoinConversion, true); if (mapJoinOp == null) { LOG.debug("Conversion to bucket map join failed."); return false; @@ -913,6 +931,46 @@ private boolean isCrossProduct(JoinOperator joinOp) { return true; } + /** + * Return result for getMapJoinConversion method. 
+ */ + public static class MapJoinConversion { + + private final int bigTablePos; + + private final boolean isFullOuterJoin; + private final boolean isFullOuterEnabledForDynamicPartitionHashJoin; + private final boolean isFullOuterEnabledForMapJoin; + + public MapJoinConversion(int bigTablePos, boolean isFullOuterJoin, + boolean isFullOuterEnabledForDynamicPartitionHashJoin, boolean isFullOuterEnabledForMapJoin) { + this.bigTablePos = bigTablePos; + + this.isFullOuterJoin = isFullOuterJoin; + this.isFullOuterEnabledForDynamicPartitionHashJoin = isFullOuterEnabledForDynamicPartitionHashJoin; + this.isFullOuterEnabledForMapJoin = isFullOuterEnabledForMapJoin; + } + + public int getBigTablePos() { + return bigTablePos; + } + + /* + * Do we have a single FULL OUTER JOIN here? + */ + public boolean getIsFullOuterJoin() { + return isFullOuterJoin; + } + + public boolean getIsFullOuterEnabledForDynamicPartitionHashJoin() { + return isFullOuterEnabledForDynamicPartitionHashJoin; + } + + public boolean getIsFullOuterEnabledForMapJoin() { + return isFullOuterEnabledForMapJoin; + } + } + /** * Obtain big table position for join. * @@ -926,9 +984,11 @@ private boolean isCrossProduct(JoinOperator joinOp) { - * @return returns big table position or -1 if it cannot be determined + * @return the MapJoinConversion result, or null if the join cannot be converted * @throws SemanticException */ - public int getMapJoinConversionPos(JoinOperator joinOp, OptimizeTezProcContext context, + public MapJoinConversion getMapJoinConversion(JoinOperator joinOp, OptimizeTezProcContext context, int buckets, boolean skipJoinTypeChecks, long maxSize, boolean checkMapJoinThresholds) throws SemanticException { + JoinDesc joinDesc = joinOp.getConf(); + JoinCondDesc[] conds = joinDesc.getConds(); if (!skipJoinTypeChecks) { /* * HIVE-9038: Join tests fail in tez when we have more than 1 join on the same key and there is @@ -937,14 +997,32 @@ public int getMapJoinConversionPos(JoinOperator joinOp, OptimizeTezProcContext c * new operation to be able to support this. This seems like a corner case enough to special * case this for now. */ - if (joinOp.getConf().getConds().length > 1) { + if (conds.length > 1) { if (hasOuterJoin(joinOp)) { - return -1; + return null; } } } + + // Assume false until the checks below prove otherwise. + boolean isFullOuterEnabledForDynamicPartitionHashJoin = false; + boolean isFullOuterEnabledForMapJoin = false; + + boolean isFullOuterJoin = + MapJoinProcessor.precheckFullOuter(context.conf, joinOp); + if (isFullOuterJoin) { + + boolean isFullOuterEnabled = MapJoinProcessor.isFullOuterMapEnabled(context.conf, joinOp); + if (isFullOuterEnabled) { + + // FUTURE: Currently, we only support DPHJ.
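(The FUTURE note above attaches to the DPHJ enablement check that follows in the next hunk.) For orientation, the way this file's call sites consume the new result object reduces to one small decision; a self-contained toy restatement, where Conversion stands in for MapJoinConversion and the returned strings stand in for the real fallback/convert calls:

    public class MapJoinConversionPattern {
      // Minimal stand-in mirroring the patch's MapJoinConversion result object.
      static final class Conversion {
        final int bigTablePos;
        final boolean isFullOuterJoin;
        final boolean fullOuterEnabledForDphj;
        final boolean fullOuterEnabledForMapJoin;
        Conversion(int pos, boolean fullOuter, boolean dphjEnabled, boolean mapJoinEnabled) {
          this.bigTablePos = pos; this.isFullOuterJoin = fullOuter;
          this.fullOuterEnabledForDphj = dphjEnabled; this.fullOuterEnabledForMapJoin = mapJoinEnabled;
        }
      }

      // Mirrors how ConvertJoinMapJoin consumes the result: null means "cannot convert",
      // and a FULL OUTER join additionally needs the per-path enable flag.
      static String decide(Conversion c, boolean dphjPath) {
        if (c == null) {
          return "fallback to reduce-side (merge) join";
        }
        if (c.isFullOuterJoin) {
          boolean enabled = dphjPath ? c.fullOuterEnabledForDphj : c.fullOuterEnabledForMapJoin;
          if (!enabled) {
            return "fallback to reduce-side (merge) join";
          }
        }
        return "convert using big table position " + c.bigTablePos;
      }

      public static void main(String[] args) {
        System.out.println(decide(null, false));                                 // fallback
        System.out.println(decide(new Conversion(0, true, true, false), true));  // DPHJ converts
        System.out.println(decide(new Conversion(0, true, true, false), false)); // MJ path falls back
      }
    }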
+ isFullOuterEnabledForDynamicPartitionHashJoin = + MapJoinProcessor.isFullOuterEnabledForDynamicPartitionHashJoin(context.conf, joinOp); + } + } + Set bigTableCandidateSet = - MapJoinProcessor.getBigTableCandidates(joinOp.getConf().getConds()); + MapJoinProcessor.getBigTableCandidates(conds, /* isSupportFullOuter */ true); int bigTablePosition = -1; // big input cumulative row count long bigInputCumulativeCardinality = -1L; @@ -967,7 +1045,7 @@ public int getMapJoinConversionPos(JoinOperator joinOp, OptimizeTezProcContext c Statistics currInputStat = parentOp.getStatistics(); if (currInputStat == null) { LOG.warn("Couldn't get statistics from: " + parentOp); - return -1; + return null; } long inputSize = computeOnlineDataSize(currInputStat); @@ -980,14 +1058,14 @@ public int getMapJoinConversionPos(JoinOperator joinOp, OptimizeTezProcContext c if (foundInputNotFittingInMemory) { // cannot convert to map join; we've already chosen a big table // on size and there's another one that's bigger. - return -1; + return null; } if (inputSize/buckets > maxSize) { if (!bigTableCandidateSet.contains(pos)) { // can't use the current table as the big table, but it's too // big for the map side. - return -1; + return null; } currentInputNotFittingInMemory = true; @@ -1002,7 +1080,7 @@ public int getMapJoinConversionPos(JoinOperator joinOp, OptimizeTezProcContext c Long cardinality = computeCumulativeCardinality(parentOp); if (cardinality == null) { // We could not get stats, we cannot convert - return -1; + return null; } currentInputCumulativeCardinality = cardinality; } @@ -1041,7 +1119,7 @@ public int getMapJoinConversionPos(JoinOperator joinOp, OptimizeTezProcContext c if (totalSize/buckets > maxSize) { // sum of small tables size in this join exceeds configured limit // hence cannot convert. - return -1; + return null; } if (selectedBigTable) { @@ -1056,7 +1134,7 @@ public int getMapJoinConversionPos(JoinOperator joinOp, OptimizeTezProcContext c if (checkMapJoinThresholds && convertDPHJ && checkShuffleSizeForLargeTable(joinOp, bigTablePosition, context)) { LOG.debug("Conditions to convert to MapJoin are not met"); - return -1; + return null; } // only allow cross product in map joins if build side is 'small' @@ -1070,7 +1148,7 @@ public int getMapJoinConversionPos(JoinOperator joinOp, OptimizeTezProcContext c HiveConf.getIntVar(context.conf, HiveConf.ConfVars.XPRODSMALLTABLEROWSTHRESHOLD)) { // if any of smaller side is estimated to generate more than // threshold rows we would disable mapjoin - return -1; + return null; } } } @@ -1081,7 +1159,12 @@ public int getMapJoinConversionPos(JoinOperator joinOp, OptimizeTezProcContext c // equal to sum of small tables size. joinOp.getConf().setInMemoryDataSize(totalSize / buckets); - return bigTablePosition; + return + new MapJoinConversion( + bigTablePosition, + isFullOuterJoin, + isFullOuterEnabledForDynamicPartitionHashJoin, + isFullOuterEnabledForMapJoin); } // This is akin to CBO cumulative cardinality model @@ -1130,7 +1213,8 @@ private static Long computeCumulativeCardinality(Operator parentOp : joinOp.getParentOperators()) { @@ -1140,15 +1224,20 @@ public MapJoinOperator convertJoinMapJoin(JoinOperator joinOp, OptimizeTezProcCo } // can safely convert the join to a map join. 
+ final int bigTablePosition = mapJoinConversion.getBigTablePos(); MapJoinOperator mapJoinOp = MapJoinProcessor.convertJoinOpMapJoinOp(context.conf, joinOp, joinOp.getConf().isLeftInputJoin(), joinOp.getConf().getBaseSrc(), joinOp.getConf().getMapAliases(), bigTablePosition, true, removeReduceSink); - mapJoinOp.getConf().setHybridHashJoin(HiveConf.getBoolVar(context.conf, + if (mapJoinOp == null) { + return null; + } + MapJoinDesc mapJoinDesc = mapJoinOp.getConf(); + mapJoinDesc.setHybridHashJoin(HiveConf.getBoolVar(context.conf, HiveConf.ConfVars.HIVEUSEHYBRIDGRACEHASHJOIN)); - List joinExprs = mapJoinOp.getConf().getKeys().values().iterator().next(); + List joinExprs = mapJoinDesc.getKeys().values().iterator().next(); if (joinExprs.size() == 0) { // In case of cross join, we disable hybrid grace hash join - mapJoinOp.getConf().setHybridHashJoin(false); + mapJoinDesc.setHybridHashJoin(false); } Operator parentBigTableOp = @@ -1356,18 +1445,29 @@ private boolean convertJoinDynamicPartitionedHashJoin(JoinOperator joinOp, Optim // Since we don't have big table index yet, must start with estimate of numReducers int numReducers = estimateNumBuckets(joinOp, false); LOG.info("Try dynamic partitioned hash join with estimated " + numReducers + " reducers"); - int bigTablePos = getMapJoinConversionPos(joinOp, context, numReducers, false, maxJoinMemory, false); - if (bigTablePos >= 0) { + MapJoinConversion mapJoinConversion = + getMapJoinConversion(joinOp, context, numReducers, false, maxJoinMemory, false); + if (mapJoinConversion != null) { + if (mapJoinConversion.getIsFullOuterJoin() && + !mapJoinConversion.getIsFullOuterEnabledForDynamicPartitionHashJoin()) { + return false; + } + final int bigTablePos = mapJoinConversion.getBigTablePos(); + // Now that we have the big table index, get real numReducers value based on big table RS ReduceSinkOperator bigTableParentRS = (ReduceSinkOperator) (joinOp.getParentOperators().get(bigTablePos)); numReducers = bigTableParentRS.getConf().getNumReducers(); LOG.debug("Real big table reducers = " + numReducers); - MapJoinOperator mapJoinOp = convertJoinMapJoin(joinOp, context, bigTablePos, false); + MapJoinOperator mapJoinOp = convertJoinMapJoin(joinOp, context, mapJoinConversion, false); if (mapJoinOp != null) { LOG.info("Selected dynamic partitioned hash join"); - mapJoinOp.getConf().setDynamicPartitionHashJoin(true); + MapJoinDesc mapJoinDesc = mapJoinOp.getConf(); + mapJoinDesc.setDynamicPartitionHashJoin(true); + if (mapJoinConversion.getIsFullOuterJoin()) { + FullOuterMapJoinOptimization.removeFilterMap(mapJoinDesc); + } // Set OpTraits for dynamically partitioned hash join: // bucketColNames: Re-use previous joinOp's bucketColNames. Parent operators should be // reduce sink, which should have bucket columns based on the join keys. @@ -1408,11 +1508,15 @@ private void fallbackToReduceSideJoin(JoinOperator joinOp, OptimizeTezProcContex private void fallbackToMergeJoin(JoinOperator joinOp, OptimizeTezProcContext context) throws SemanticException { - int pos = getMapJoinConversionPos(joinOp, context, estimateNumBuckets(joinOp, false), - true, Long.MAX_VALUE, false); - if (pos < 0) { + MapJoinConversion mapJoinConversion = + getMapJoinConversion( + joinOp, context, estimateNumBuckets(joinOp, false), true, Long.MAX_VALUE, false); + final int pos; + if (mapJoinConversion == null || mapJoinConversion.getBigTablePos() == -1) { LOG.info("Could not get a valid join position. 
Defaulting to position 0"); pos = 0; + } else { + pos = mapJoinConversion.getBigTablePos(); } LOG.info("Fallback to common merge join operator"); convertJoinSMBJoin(joinOp, context, pos, 0, false); @@ -1435,7 +1539,6 @@ private boolean checkNumberOfEntriesForHashTable(JoinOperator joinOp, int positi for (String key : keys) { ColStatistics cs = inputStats.getColumnStatisticsFromColName(key); if (cs == null) { - LOG.debug("Couldn't get statistics for: {}", key); return true; } columnStats.add(cs); diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/FullOuterMapJoinOptimization.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/FullOuterMapJoinOptimization.java new file mode 100644 index 0000000000..b9e86ebbc6 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/FullOuterMapJoinOptimization.java @@ -0,0 +1,95 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.optimizer; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Properties; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.plan.MapJoinDesc; +import org.apache.hadoop.hive.ql.plan.TableDesc; +import org.apache.hadoop.hive.serde.serdeConstants; +import org.apache.hadoop.hive.serde2.SerDeUtils; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; + +import com.google.common.base.Joiner; + +/** + * FULL OUTER MapJoin planning. + */ +public class FullOuterMapJoinOptimization { + + FullOuterMapJoinOptimization() { + } + + public static void removeFilterMap(MapJoinDesc mapJoinDesc) throws SemanticException { + int[][] filterMaps = mapJoinDesc.getFilterMap(); + if (filterMaps == null) { + return; + } + final byte posBigTable = (byte) mapJoinDesc.getPosBigTable(); + final int numAliases = mapJoinDesc.getExprs().size(); + List valueFilteredTblDescs = mapJoinDesc.getValueFilteredTblDescs(); + for (byte pos = 0; pos < numAliases; pos++) { + if (pos != posBigTable) { + int[] filterMap = filterMaps[pos]; + TableDesc tableDesc = valueFilteredTblDescs.get(pos); + Properties properties = tableDesc.getProperties(); + String columnNameProperty = properties.getProperty(serdeConstants.LIST_COLUMNS); + String columnNameDelimiter = + properties.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? 
+ properties.getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : + String.valueOf(SerDeUtils.COMMA); + + String columnTypeProperty = properties.getProperty(serdeConstants.LIST_COLUMN_TYPES); + List columnNameList; + if (columnNameProperty.length() == 0) { + columnNameList = new ArrayList(); + } else { + columnNameList = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); + } + List truncatedColumnNameList = columnNameList.subList(0, columnNameList.size() - 1); + String truncatedColumnNameProperty = + Joiner.on(columnNameDelimiter).join(truncatedColumnNameList); + + List columnTypeList; + if (columnTypeProperty.length() == 0) { + columnTypeList = new ArrayList(); + } else { + columnTypeList = TypeInfoUtils + .getTypeInfosFromTypeString(columnTypeProperty); + } + if (!columnTypeList.get(columnTypeList.size() - 1).equals(TypeInfoFactory.shortTypeInfo)) { + throw new SemanticException("Expecting filterTag smallint as last column type"); + } + List truncatedColumnTypeList = + columnTypeList.subList(0, columnTypeList.size() - 1); + String truncatedColumnTypeProperty = + Joiner.on(",").join(truncatedColumnTypeList); + + properties.setProperty(serdeConstants.LIST_COLUMNS, truncatedColumnNameProperty); + properties.setProperty(serdeConstants.LIST_COLUMN_TYPES, truncatedColumnTypeProperty); + } + } + mapJoinDesc.setFilterMap(null); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java index e3ae0bf0f3..cff32d3a23 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java @@ -952,6 +952,7 @@ public static void finalMapWorkChores( SparkWork work = sparkTask.getWork(); for (BaseWork w : work.getAllWorkUnsorted()) { if (w instanceof MapWork) { + ((MapWork) w).internTable(interner); ((MapWork) w).deriveLlap(conf, false); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java index 019372bc0a..23186c6d3f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java @@ -26,6 +26,7 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Set; import java.util.Stack; @@ -48,6 +49,7 @@ import org.apache.hadoop.hive.ql.exec.ScriptOperator; import org.apache.hadoop.hive.ql.exec.SelectOperator; import org.apache.hadoop.hive.ql.exec.UnionOperator; +import org.apache.hadoop.hive.ql.exec.persistence.MapJoinKey; import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher; import org.apache.hadoop.hive.ql.lib.Dispatcher; import org.apache.hadoop.hive.ql.lib.GraphWalker; @@ -56,6 +58,7 @@ import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.lib.Rule; import org.apache.hadoop.hive.ql.lib.RuleRegExp; +import org.apache.hadoop.hive.ql.optimizer.physical.Vectorizer.EnabledOverride; import org.apache.hadoop.hive.ql.parse.GenMapRedWalker; import org.apache.hadoop.hive.ql.parse.ParseContext; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -75,6 +78,7 @@ import org.apache.hadoop.hive.ql.plan.SelectDesc; import org.apache.hadoop.hive.ql.plan.TableDesc; import org.apache.hadoop.hive.serde.serdeConstants; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; 
import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -336,6 +340,9 @@ public MapJoinOperator convertMapJoin(HiveConf conf, // create the map-join operator MapJoinOperator mapJoinOp = convertJoinOpMapJoinOp(conf, op, leftInputJoin, baseSrc, mapAliases, mapJoinPos, noCheckOuterJoin); + if (mapJoinOp == null) { + return null; + } // remove old parents for (pos = 0; pos < newParentOps.size(); pos++) { @@ -356,6 +363,213 @@ public MapJoinOperator convertMapJoin(HiveConf conf, return mapJoinOp; } + public static boolean onExpressionHasNullSafes(JoinDesc desc) { + boolean[] nullSafes = desc.getNullSafes(); + if (nullSafes == null) { + return false; + } + for (boolean nullSafe : nullSafes) { + if (nullSafe) { + return true; + } + } + return false; + } + + private static boolean checkFullOuterMapJoinCompatible(HiveConf hiveConf, + JoinOperator joinOp) throws SemanticException { + JoinDesc joinDesc = joinOp.getConf(); + + // Make sure all key and value expressions are columns. + for (Entry> mapEntry : joinDesc.getExprs().entrySet()) { + List exprList = mapEntry.getValue(); + for (ExprNodeDesc expr : exprList) { + if (!(expr instanceof ExprNodeColumnDesc)) { + LOG.debug("FULL OUTER MapJoin: only column expressions are supported " + expr.toString()); + return false; + } + } + } + + // Check for supported key data types. + Byte[] order = joinDesc.getTagOrder(); + ExprNodeDesc[][] joinKeysArray = joinDesc.getJoinKeys(); + for (int i = 0; i < order.length; i++) { + byte pos = order[i]; + ExprNodeDesc[] keyExprs = joinKeysArray[pos]; + for (ExprNodeDesc keyExpr : keyExprs) { + TypeInfo typeInfo = keyExpr.getTypeInfo(); + + // Verify we handle the key column types for an optimized table. This is the effectively + // the same check used in Tez HashTableLoader. + if (!MapJoinKey.isSupportedField(typeInfo)) { + if (LOG.isDebugEnabled()) { + LOG.debug("FULL OUTER MapJoin not enabled: " + + " key type " + typeInfo.toString() + " not supported"); + } + return false; + } + } + } + + if (onExpressionHasNullSafes(joinDesc)) { + if (LOG.isDebugEnabled()) { + LOG.debug("FULL OUTER MapJoin not enabled: " + + "nullsafe not supported"); + } + return false; + } + + boolean isVectorizationMapJoinNativeEnabled = HiveConf.getBoolVar(hiveConf, + HiveConf.ConfVars.HIVE_VECTORIZATION_MAPJOIN_NATIVE_ENABLED); + boolean isHybridHashJoin = HiveConf.getBoolVar(hiveConf, + HiveConf.ConfVars.HIVEUSEHYBRIDGRACEHASHJOIN); + if (isVectorizationMapJoinNativeEnabled && isHybridHashJoin) { + if (LOG.isDebugEnabled()) { + LOG.debug("FULL OUTER MapJoin not enabled: " + + "Native Vector MapJoin and Hybrid Grace not supported"); + } + return false; + } + + if (joinDesc.getResidualFilterExprs() != null && + joinDesc.getResidualFilterExprs().size() != 0) { + if (LOG.isDebugEnabled()) { + LOG.debug("FULL OUTER MapJoin not enabled: " + + "non-equi joins not supported"); + } + return false; + } + + return true; + } + + public static boolean precheckFullOuter(HiveConf hiveConf, JoinOperator joinOp) + throws SemanticException { + JoinDesc joinDesc = joinOp.getConf(); + JoinCondDesc[] conds = joinDesc.getConds(); + + /* + * Are we even being asked to do a FULL OUTER JOIN? + */ + boolean hasFullOuterJoin = false; + for (JoinCondDesc cond : conds) { + if (cond.getType() == JoinDesc.FULL_OUTER_JOIN) { + hasFullOuterJoin = true; + break; + } + } + if (!hasFullOuterJoin) { + return false; + } + + if (conds.length > 1) { + + // No multiple condition FULL OUTER MapJoin. 
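(The precheck's debug logging continues in the hunk below.) Looking back at FullOuterMapJoinOptimization.removeFilterMap earlier in this diff: it strips the trailing filterTag smallint from each small table's serialized value schema. A self-contained sketch of that property surgery; the literal keys "columns" and "columns.types" mirror serdeConstants.LIST_COLUMNS and LIST_COLUMN_TYPES, and the column names are made up for illustration:

    import java.util.Arrays;
    import java.util.Properties;

    public class FilterTagTruncation {
      public static void main(String[] args) {
        // Small-table value schema as MapJoinDesc's TableDesc carries it,
        // with the filterTag smallint appended as the last column.
        Properties props = new Properties();
        props.setProperty("columns", "v1,v2,_filterTag");           // serdeConstants.LIST_COLUMNS
        props.setProperty("columns.types", "string,int,smallint");  // serdeConstants.LIST_COLUMN_TYPES

        String[] names = props.getProperty("columns").split(",");
        // NOTE: a comma split is only safe for this primitive-only example; the patch
        // uses TypeInfoUtils.getTypeInfosFromTypeString to handle complex types like map<string,int>.
        String[] types = props.getProperty("columns.types").split(",");
        if (!"smallint".equals(types[types.length - 1])) {
          throw new IllegalStateException("Expecting filterTag smallint as last column type");
        }

        // Rebuild the properties without the trailing filterTag column, as removeFilterMap does.
        props.setProperty("columns", String.join(",", Arrays.copyOf(names, names.length - 1)));
        props.setProperty("columns.types", String.join(",", Arrays.copyOf(types, types.length - 1)));

        System.out.println(props.getProperty("columns"));        // v1,v2
        System.out.println(props.getProperty("columns.types"));  // string,int
      }
    }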
+ if (LOG.isDebugEnabled()) { + LOG.debug("FULL OUTER MapJoin not enabled: multiple JOIN conditions not supported"); + } + return false; + } + + return true; + } + + public static boolean isFullOuterMapEnabled(HiveConf hiveConf, JoinOperator joinOp) + throws SemanticException { + + final String testMapJoinFullOuterOverrideString = + HiveConf.getVar(hiveConf, + HiveConf.ConfVars.HIVE_TEST_MAPJOINFULLOUTER_OVERRIDE); + EnabledOverride mapJoinFullOuterOverride = + EnabledOverride.nameMap.get(testMapJoinFullOuterOverrideString); + + final boolean isEnabled = + HiveConf.getBoolVar( + hiveConf, + HiveConf.ConfVars.HIVEMAPJOINFULLOUTER); + switch (mapJoinFullOuterOverride) { + case NONE: + { + if (!isEnabled) { + if (LOG.isDebugEnabled()) { + LOG.debug("FULL OUTER MapJoin not enabled: " + + HiveConf.ConfVars.HIVEMAPJOINFULLOUTER.varname + " is false"); + } + return false; + } + } + break; + case DISABLE: + if (LOG.isDebugEnabled()) { + LOG.debug("FULL OUTER MapJoin not enabled: " + + HiveConf.ConfVars.HIVE_TEST_MAPJOINFULLOUTER_OVERRIDE.varname + " is disable (" + + " " + HiveConf.ConfVars.HIVEMAPJOINFULLOUTER.varname + " is " + isEnabled + ")"); + } + return false; + case ENABLE: + + // Different parts of the code may rely on this being set... + HiveConf.setBoolVar(hiveConf, + HiveConf.ConfVars.HIVEMAPJOINFULLOUTER, true); + if (LOG.isDebugEnabled()) { + LOG.debug("FULL OUTER MapJoin is enabled: " + + HiveConf.ConfVars.HIVE_TEST_MAPJOINFULLOUTER_OVERRIDE.varname + " is enable (" + + " " + HiveConf.ConfVars.HIVEMAPJOINFULLOUTER.varname + " is " + isEnabled + ")"); + } + break; + default: + throw new RuntimeException("Unexpected vectorization enabled override " + + mapJoinFullOuterOverride); + } + + final String engine = + HiveConf.getVar( + hiveConf, + HiveConf.ConfVars.HIVE_EXECUTION_ENGINE); + final boolean isTezEngine = engine.equalsIgnoreCase("tez"); + if (!isTezEngine) { + + // Only Tez for now. + if (LOG.isDebugEnabled()) { + LOG.debug("FULL OUTER MapJoin not enabled: Only Tez engine supported"); + } + return false; + } + + /* + * Optimized Hash Table (i.e. not old-style MR HashMap). 
+ */ + final boolean isOptimizedHashTableEnabled = + HiveConf.getBoolVar( + hiveConf, + HiveConf.ConfVars.HIVEMAPJOINUSEOPTIMIZEDTABLE); + if (!isOptimizedHashTableEnabled) { + if (LOG.isDebugEnabled()) { + LOG.debug("FULL OUTER MapJoin not enabled: " + + HiveConf.ConfVars.HIVEMAPJOINUSEOPTIMIZEDTABLE.varname + " is false"); + } + return false; + } + + boolean isCompatibleFullOuterMapJoin = checkFullOuterMapJoinCompatible(hiveConf, joinOp); + if (!isCompatibleFullOuterMapJoin) { + return false; + } + + if (LOG.isDebugEnabled()) { + LOG.debug("FULL OUTER MapJoin enabled"); + } + return true; + } + + public static boolean isFullOuterEnabledForDynamicPartitionHashJoin(HiveConf hiveConf, JoinOperator joinOp) + throws SemanticException { + JoinDesc joinDesc = joinOp.getConf(); + + return true; + } + public static MapJoinOperator convertJoinOpMapJoinOp(HiveConf hconf, JoinOperator op, boolean leftInputJoin, String[] baseSrc, List mapAliases, int mapJoinPos, boolean noCheckOuterJoin) throws SemanticException { @@ -371,6 +585,9 @@ public static MapJoinOperator convertJoinOpMapJoinOp(HiveConf hconf, MapJoinDesc mapJoinDescriptor = getMapJoinDesc(hconf, op, leftInputJoin, baseSrc, mapAliases, mapJoinPos, noCheckOuterJoin, adjustParentsChildren); + if (mapJoinDescriptor == null) { + return null; + } // reduce sink row resolver used to generate map join op RowSchema outputRS = op.getSchema(); @@ -474,6 +691,9 @@ public MapJoinOperator generateMapJoinOperator(ParseContext pctx, JoinOperator o MapJoinOperator mapJoinOp = convertMapJoin(pctx.getConf(), op, op.getConf().isLeftInputJoin(), op.getConf().getBaseSrc(), op.getConf().getMapAliases(), mapJoinPos, noCheckOuterJoin, true); + if (mapJoinOp == null) { + return null; + } // create a dummy select to select all columns genSelectPlan(pctx, mapJoinOp); @@ -502,8 +722,29 @@ public MapJoinOperator generateMapJoinOperator(ParseContext pctx, JoinOperator o * @return set of big table candidates */ public static Set getBigTableCandidates(JoinCondDesc[] condns) { + return getBigTableCandidates(condns, /* isSupportFullOuter */ false); + } + + public static Set getBigTableCandidates(JoinCondDesc[] condns, + boolean isSupportFullOuter) { + Set bigTableCandidates = new HashSet(); + if (condns.length == 1) { + JoinCondDesc condn = condns[0]; + if (condn.getType() == JoinDesc.FULL_OUTER_JOIN) { + + if (!isSupportFullOuter) { + return new HashSet(); + } + + // FULL OUTER MapJoin must be a single condition. + bigTableCandidates.add(condn.getLeft()); + bigTableCandidates.add(condn.getRight()); + return bigTableCandidates; + } + } + boolean seenOuterJoin = false; Set seenPostitions = new HashSet(); Set leftPosListOfLastRightOuterJoin = new HashSet(); @@ -512,17 +753,14 @@ public MapJoinOperator generateMapJoinOperator(ParseContext pctx, JoinOperator o boolean lastSeenRightOuterJoin = false; for (JoinCondDesc condn : condns) { int joinType = condn.getType(); + if (joinType == JoinDesc.FULL_OUTER_JOIN) { + return new HashSet(); + } + seenPostitions.add(condn.getLeft()); seenPostitions.add(condn.getRight()); - if (joinType == JoinDesc.FULL_OUTER_JOIN) { - // setting these 2 parameters here just in case that if the code got - // changed in future, these 2 are not missing. 
- seenOuterJoin = true; - lastSeenRightOuterJoin = false; - // empty set - cannot convert - return new HashSet(); - } else if (joinType == JoinDesc.LEFT_OUTER_JOIN + if (joinType == JoinDesc.LEFT_OUTER_JOIN || joinType == JoinDesc.LEFT_SEMI_JOIN) { seenOuterJoin = true; if(bigTableCandidates.size() == 0) { @@ -564,7 +802,8 @@ public MapJoinOperator generateMapJoinOperator(ParseContext pctx, JoinOperator o * @throws SemanticException if given position is not in the big table candidates. */ public static int checkMapJoin(int mapJoinPos, JoinCondDesc[] condns) { - Set bigTableCandidates = MapJoinProcessor.getBigTableCandidates(condns); + Set bigTableCandidates = + MapJoinProcessor.getBigTableCandidates(condns, /* isSupportFullOuter */ true); // bigTableCandidates can never be null if (!bigTableCandidates.contains(mapJoinPos)) { @@ -1183,6 +1422,9 @@ public static MapJoinDesc getMapJoinDesc(HiveConf hconf, } List keyCols = keyExprMap.get((byte) mapJoinPos); + if (keyCols == null) { + return null; + } List outputColumnNames = op.getConf().getOutputColumnNames(); TableDesc keyTableDesc = diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/SparkMapJoinProcessor.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/SparkMapJoinProcessor.java index 16d088ab0b..a5400d6b27 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SparkMapJoinProcessor.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SparkMapJoinProcessor.java @@ -64,6 +64,9 @@ public MapJoinOperator convertMapJoin(HiveConf conf, MapJoinOperator mapJoinOp = convertJoinOpMapJoinOp(conf, op, op.getConf().isLeftInputJoin(), op.getConf().getBaseSrc(), op.getConf().getMapAliases(), bigTablePos, noCheckOuterJoin); + if (mapJoinOp == null) { + return null; + } // 1. remove RS as parent for the big table branch // 2. remove old join op from child set of all the RSs diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveDruidRules.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveDruidRules.java index a914210c31..7760cdc648 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveDruidRules.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveDruidRules.java @@ -37,6 +37,7 @@ import org.apache.calcite.plan.RelOptRuleCall; import org.apache.calcite.rel.core.Aggregate; import org.apache.calcite.rel.core.AggregateCall; +import org.apache.calcite.rel.rules.DateRangeRules; import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.fun.SqlSumEmptyIsZeroAggFunction; import org.apache.calcite.tools.RelBuilder; @@ -96,6 +97,9 @@ public static final AggregateExpandDistinctAggregatesDruidRule EXPAND_SINGLE_DISTINCT_AGGREGATES_DRUID_RULE = new AggregateExpandDistinctAggregatesDruidRule(HiveRelFactories.HIVE_BUILDER); + public static final DateRangeRules.FilterDateRangeRule FILTER_DATE_RANGE_RULE = + new DateRangeRules.FilterDateRangeRule(HiveRelFactories.HIVE_BUILDER); + /** * This is a simplified version of {@link org.apache.calcite.rel.rules.AggregateExpandDistinctAggregatesRule} * The goal of this simplified version is to help pushing single count distinct as multi-phase aggregates. 
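Before moving into the Vectorizer changes below, one subtlety in the MapJoinProcessor hunk above is worth spelling out: a join whose single condition is FULL OUTER now yields both sides as big-table candidates (when the caller opts in), while FULL OUTER anywhere in a multi-condition join still yields no candidates. A toy restatement of just that rule, with an enum standing in for the JoinDesc type constants and the cascaded LEFT/RIGHT OUTER bookkeeping of the real method omitted:

    import java.util.HashSet;
    import java.util.Set;

    public class BigTableCandidatesSketch {
      enum JoinType { INNER, LEFT_OUTER, RIGHT_OUTER, FULL_OUTER }

      static final class Cond {
        final int left, right; final JoinType type;
        Cond(int left, int right, JoinType type) { this.left = left; this.right = right; this.type = type; }
      }

      // Mirrors the FULL OUTER handling added in this patch.
      static Set<Integer> bigTableCandidates(Cond[] conds, boolean isSupportFullOuter) {
        Set<Integer> candidates = new HashSet<>();
        if (conds.length == 1 && conds[0].type == JoinType.FULL_OUTER) {
          if (!isSupportFullOuter) {
            return candidates;            // empty: caller cannot use FULL OUTER MapJoin
          }
          candidates.add(conds[0].left);  // either side may serve as the big table
          candidates.add(conds[0].right);
          return candidates;
        }
        for (Cond cond : conds) {
          if (cond.type == JoinType.FULL_OUTER) {
            return new HashSet<>();       // FULL OUTER in a multi-condition join: no candidates
          }
        }
        // ... non-FULL-OUTER logic unchanged from the original method ...
        return candidates;
      }

      public static void main(String[] args) {
        Cond fullOuter = new Cond(0, 1, JoinType.FULL_OUTER);
        System.out.println(bigTableCandidates(new Cond[] {fullOuter}, true));   // [0, 1]
        System.out.println(bigTableCandidates(new Cond[] {fullOuter}, false));  // []
      }
    }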
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java index 48974f8dda..51b186cde0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java @@ -73,6 +73,9 @@ import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinOuterLongOperator; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinOuterMultiKeyOperator; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinOuterStringOperator; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinFullOuterLongOperator; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinFullOuterMultiKeyOperator; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.VectorMapJoinFullOuterStringOperator; import org.apache.hadoop.hive.ql.exec.vector.ptf.VectorPTFOperator; import org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; @@ -122,6 +125,7 @@ import org.apache.hadoop.hive.ql.plan.MapJoinDesc; import org.apache.hadoop.hive.ql.plan.MapWork; import org.apache.hadoop.hive.ql.plan.MapredWork; +import org.apache.hadoop.hive.ql.plan.MergeJoinWork; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.PTFDesc; import org.apache.hadoop.hive.ql.plan.SelectDesc; @@ -289,15 +293,15 @@ private HiveConf hiveConf; - public static enum VectorizationEnabledOverride { + public enum EnabledOverride { NONE, DISABLE, ENABLE; - public final static Map nameMap = - new HashMap(); + public static final Map nameMap = + new HashMap(); static { - for (VectorizationEnabledOverride vectorizationEnabledOverride : values()) { + for (EnabledOverride vectorizationEnabledOverride : values()) { nameMap.put( vectorizationEnabledOverride.name().toLowerCase(), vectorizationEnabledOverride); } @@ -305,7 +309,7 @@ } boolean isVectorizationEnabled; - private VectorizationEnabledOverride vectorizationEnabledOverride; + private EnabledOverride vectorizationEnabledOverride; boolean isTestForcedVectorizationEnable; private boolean useVectorizedInputFileFormat; @@ -725,10 +729,63 @@ public VectorDesc getVectorDesc() { } } - private List> newOperatorList() { + private static List> newOperatorList() { return new ArrayList>(); } + public static void debugDisplayJoinOperatorTree(Operator joinOperator, + String prefix) { + List> currentParentList = newOperatorList(); + currentParentList.add(joinOperator); + + int depth = 0; + do { + List> nextParentList = newOperatorList(); + + final int count = currentParentList.size(); + for (int i = 0; i < count; i++) { + Operator parent = currentParentList.get(i); + System.out.println(prefix + " parent depth " + depth + " " + + parent.getClass().getSimpleName() + " " + parent.toString()); + + List> parentList = parent.getParentOperators(); + if (parentList == null || parentList.size() == 0) { + continue; + } + + nextParentList.addAll(parentList); + } + + currentParentList = nextParentList; + depth--; + } while (currentParentList.size() > 0); + + List> currentChildList = newOperatorList(); + currentChildList.addAll(joinOperator.getChildOperators()); + + depth = 1; + do { + List> nextChildList = newOperatorList(); + + final int count = currentChildList.size(); + for (int i = 0; i < count; i++) { + Operator child = currentChildList.get(i); + System.out.println(prefix + " child depth " + depth + " " + + 
child.getClass().getSimpleName() + " " + child.toString()); + + List> childList = child.getChildOperators(); + if (childList == null || childList.size() == 0) { + continue; + } + + nextChildList.addAll(childList); + } + + currentChildList = nextChildList; + depth++; + } while (currentChildList.size() > 0); + } + private Operator validateAndVectorizeOperatorTree( Operator nonVecRootOperator, boolean isReduce, boolean isTezOrSpark, @@ -973,7 +1030,15 @@ public Object dispatch(Node nd, Stack stack, Object... nodeOutputs) if (isReduceVectorizationEnabled) { convertReduceWork(reduceWork); } + logReduceWorkExplainVectorization(reduceWork); + } else if (baseWork instanceof MergeJoinWork){ + MergeJoinWork mergeJoinWork = (MergeJoinWork) baseWork; + + // Always set the EXPLAIN conditions. + setMergeJoinWorkExplainConditions(mergeJoinWork); + + logMergeJoinWorkExplainVectorization(mergeJoinWork); + } } } else if (currTask instanceof SparkTask) { @@ -1029,6 +1094,11 @@ private void setReduceWorkExplainConditions(ReduceWork reduceWork) { HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE)); } + private void setMergeJoinWorkExplainConditions(MergeJoinWork mergeJoinWork) { + + setExplainConditions(mergeJoinWork); + } + private boolean logExplainVectorization(BaseWork baseWork, String name) { if (!baseWork.getVectorizationExamined()) { @@ -1101,6 +1171,13 @@ private void logReduceWorkExplainVectorization(ReduceWork reduceWork) { LOG.info("Reducer engine: " + reduceWork.getVectorReduceEngine()); } + private void logMergeJoinWorkExplainVectorization(MergeJoinWork mergeJoinWork) { + + if (!logExplainVectorization(mergeJoinWork, "MergeJoin")) { + return; + } + } + private void convertMapWork(MapWork mapWork, boolean isTezOrSpark) throws SemanticException { // We have to evaluate the input format to see if vectorization is enabled, so @@ -2319,7 +2396,7 @@ public PhysicalContext resolve(PhysicalContext physicalContext) throws SemanticE HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_TEST_VECTORIZATION_ENABLED_OVERRIDE); vectorizationEnabledOverride = - VectorizationEnabledOverride.nameMap.get(vectorizationEnabledOverrideString); + EnabledOverride.nameMap.get(vectorizationEnabledOverrideString); isVectorizationEnabled = HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED); @@ -3264,7 +3341,7 @@ private boolean isBigTableOnlyResults(MapJoinDesc desc) { HashTableImplementationType hashTableImplementationType = HashTableImplementationType.NONE; HashTableKind hashTableKind = HashTableKind.NONE; HashTableKeyType hashTableKeyType = HashTableKeyType.NONE; - VectorMapJoinVariation vectorMapJoinVariation = VectorMapJoinVariation.NONE; + VectorMapJoinVariation vectorMapJoinVariation = null; if (vectorDesc.getIsFastHashTableEnabled()) { hashTableImplementationType = HashTableImplementationType.FAST; @@ -3334,6 +3411,10 @@ private boolean isBigTableOnlyResults(MapJoinDesc desc) { vectorMapJoinVariation = VectorMapJoinVariation.OUTER; hashTableKind = HashTableKind.HASH_MAP; break; + case JoinDesc.FULL_OUTER_JOIN: + vectorMapJoinVariation = VectorMapJoinVariation.FULL_OUTER; + hashTableKind = HashTableKind.HASH_MAP; + break; case JoinDesc.LEFT_SEMI_JOIN: vectorMapJoinVariation = VectorMapJoinVariation.LEFT_SEMI; hashTableKind = HashTableKind.HASH_SET; @@ -3363,6 +3444,9 @@ private boolean isBigTableOnlyResults(MapJoinDesc desc) { case OUTER: opClass = VectorMapJoinOuterLongOperator.class; break; + case FULL_OUTER: + opClass = VectorMapJoinFullOuterLongOperator.class; + break; default:
throw new HiveException("Unknown operator variation " + vectorMapJoinVariation); } @@ -3381,6 +3465,9 @@ private boolean isBigTableOnlyResults(MapJoinDesc desc) { case OUTER: opClass = VectorMapJoinOuterStringOperator.class; break; + case FULL_OUTER: + opClass = VectorMapJoinFullOuterStringOperator.class; + break; default: throw new HiveException("Unknown operator variation " + vectorMapJoinVariation); } @@ -3399,6 +3486,9 @@ private boolean isBigTableOnlyResults(MapJoinDesc desc) { case OUTER: opClass = VectorMapJoinOuterMultiKeyOperator.class; break; + case FULL_OUTER: + opClass = VectorMapJoinFullOuterMultiKeyOperator.class; + break; default: throw new HiveException("Unknown operator variation " + vectorMapJoinVariation); } @@ -3414,6 +3504,10 @@ private boolean isBigTableOnlyResults(MapJoinDesc desc) { vectorDesc.setHashTableKind(hashTableKind); vectorDesc.setHashTableKeyType(hashTableKeyType); vectorDesc.setVectorMapJoinVariation(vectorMapJoinVariation); + if (vectorMapJoinVariation == VectorMapJoinVariation.FULL_OUTER) { + + vectorDesc.setIsFullOuter(true); + } vectorDesc.setMinMaxEnabled(minMaxEnabled); vectorDesc.setVectorMapJoinInfo(vectorMapJoinInfo); @@ -3526,6 +3620,8 @@ private boolean canSpecializeMapJoin(Operator op, MapJoi /* * Similarly, we need a mapping since a value expression can be a calculation and the value * will go into a scratch column. + * + * Value expressions include keys? YES. */ int[] bigTableValueColumnMap = new int[allBigTableValueExpressions.length]; String[] bigTableValueColumnNames = new String[allBigTableValueExpressions.length]; @@ -3565,18 +3661,24 @@ private boolean canSpecializeMapJoin(Operator op, MapJoi vectorDesc.setAllBigTableValueExpressions(allBigTableValueExpressions); /* - * Small table information. + * Column mapping. */ - VectorColumnOutputMapping bigTableRetainedMapping = - new VectorColumnOutputMapping("Big Table Retained Mapping"); + VectorColumnOutputMapping bigTableRetainMapping = + new VectorColumnOutputMapping("Big Table Retain Mapping"); - VectorColumnOutputMapping bigTableOuterKeyMapping = - new VectorColumnOutputMapping("Big Table Outer Key Mapping"); + VectorColumnOutputMapping nonOuterSmallTableKeyMapping = + new VectorColumnOutputMapping("Non Outer Small Table Key Key Mapping"); + + VectorColumnOutputMapping outerSmallTableKeyMapping = + new VectorColumnOutputMapping("Outer Small Table Key Mapping"); + + VectorColumnSourceMapping fullOuterSmallTableKeyMapping = + new VectorColumnSourceMapping("Full Outer Small Table Key Mapping"); // The order of the fields in the LazyBinary small table value must be used, so // we use the source ordering flavor for the mapping. - VectorColumnSourceMapping smallTableMapping = - new VectorColumnSourceMapping("Small Table Mapping"); + VectorColumnSourceMapping smallTableValueMapping = + new VectorColumnSourceMapping("Small Table Value Mapping"); Byte[] order = desc.getTagOrder(); Byte posSingleVectorMapJoinSmallTable = (order[0] == posBigTable ? order[1] : order[0]); @@ -3586,7 +3688,6 @@ private boolean canSpecializeMapJoin(Operator op, MapJoi * Gather up big and small table output result information from the MapJoinDesc. 
*/ List bigTableRetainList = desc.getRetainList().get(posBigTable); - int bigTableRetainSize = bigTableRetainList.size(); int[] smallTableIndices; int smallTableIndicesSize; @@ -3623,6 +3724,8 @@ private boolean canSpecializeMapJoin(Operator op, MapJoi VectorColumnSourceMapping projectionMapping = new VectorColumnSourceMapping("Projection Mapping"); int nextOutputColumn = (order[0] == posBigTable ? 0 : smallTableResultSize); + + final int bigTableRetainSize = bigTableRetainList.size(); for (int i = 0; i < bigTableRetainSize; i++) { // Since bigTableValueExpressions may do a calculation and produce a scratch column, we @@ -3636,9 +3739,10 @@ private boolean canSpecializeMapJoin(Operator op, MapJoi projectionMapping.add(nextOutputColumn, batchColumnIndex, typeInfo); // Collect columns we copy from the big table batch to the overflow batch. - if (!bigTableRetainedMapping.containsOutputColumn(batchColumnIndex)) { + if (!bigTableRetainMapping.containsOutputColumn(batchColumnIndex)) { + + // Tolerate repeated use of a big table column. - bigTableRetainedMapping.add(batchColumnIndex, batchColumnIndex, typeInfo); + bigTableRetainMapping.add(batchColumnIndex, batchColumnIndex, typeInfo); } nextOutputColumn++; @@ -3655,10 +3759,8 @@ private boolean canSpecializeMapJoin(Operator op, MapJoi nextOutputColumn = firstSmallTableOutputColumn; // Small table indices has more information (i.e. keys) than retain, so use it if it exists... - String[] bigTableRetainedNames; if (smallTableIndicesSize > 0) { smallTableOutputCount = smallTableIndicesSize; - bigTableRetainedNames = new String[smallTableOutputCount]; for (int i = 0; i < smallTableIndicesSize; i++) { if (smallTableIndices[i] >= 0) { @@ -3670,8 +3772,7 @@ private boolean canSpecializeMapJoin(Operator op, MapJoi // Since bigTableKeyExpressions may do a calculation and produce a scratch column, we // need to map the right column. - int batchKeyColumn = bigTableKeyColumnMap[keyIndex]; - bigTableRetainedNames[i] = bigTableKeyColumnNames[keyIndex]; + int bigTableKeyColumn = bigTableKeyColumnMap[keyIndex]; TypeInfo typeInfo = bigTableKeyTypeInfos[keyIndex]; if (!isOuterJoin) { @@ -3679,25 +3780,30 @@ private boolean canSpecializeMapJoin(Operator op, MapJoi // Optimize inner join keys of small table results. // Project the big table key into the small table result "area". - projectionMapping.add(nextOutputColumn, batchKeyColumn, typeInfo); + projectionMapping.add(nextOutputColumn, bigTableKeyColumn, typeInfo); - if (!bigTableRetainedMapping.containsOutputColumn(batchKeyColumn)) { - // If necessary, copy the big table key into the overflow batch's small table - // result "area". - bigTableRetainedMapping.add(batchKeyColumn, batchKeyColumn, typeInfo); + if (!bigTableRetainMapping.containsOutputColumn(bigTableKeyColumn)) { + + // When the Big Key is not retained in the output result, we do need to copy the + // Big Table key into the overflow batch so the projection of it (Big Table key) to + // the Small Table key will work properly... + // + nonOuterSmallTableKeyMapping.add(bigTableKeyColumn, bigTableKeyColumn, typeInfo); + } } else { - // For outer joins, since the small table key can be null when there is no match, + // For outer joins, since the small table key can be null when there is no match (NOMATCH), we must have a physical (scratch) column for those keys. We cannot use the - // projection optimization used by inner joins above. + // projection optimization used by non-[FULL] OUTER joins above.
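(The scratch-column allocation this comment describes follows in the next hunk.) The underlying reason for the copy: on a non-matching big-table row the small-table key must surface as NULL, so the key cannot simply alias the big-table column, whose values and null flags must stay intact. A framework-free illustration, with plain arrays standing in for VectorizedRowBatch columns:

    public class OuterKeyScratchColumn {
      public static void main(String[] args) {
        long[] bigTableKey = {10, 20, 30};
        boolean[] matched  = {true, false, true};

        // Scratch column: a separate physical column so NULLs for NOMATCH rows
        // do not clobber the big table key column, which other expressions still read.
        long[] scratchKey = new long[bigTableKey.length];
        boolean[] scratchIsNull = new boolean[bigTableKey.length];

        for (int i = 0; i < bigTableKey.length; i++) {
          if (matched[i]) {
            scratchKey[i] = bigTableKey[i];   // on MATCH the small table key equals the big table key
          } else {
            scratchIsNull[i] = true;          // on NOMATCH the small table side is NULL
          }
        }

        for (int i = 0; i < bigTableKey.length; i++) {
          System.out.println("row " + i + ": big=" + bigTableKey[i]
              + " small=" + (scratchIsNull[i] ? "NULL" : scratchKey[i]));
        }
      }
    }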
int scratchColumn = vContext.allocateScratchColumn(typeInfo); projectionMapping.add(nextOutputColumn, scratchColumn, typeInfo); - bigTableRetainedMapping.add(batchKeyColumn, scratchColumn, typeInfo); + outerSmallTableKeyMapping.add(bigTableKeyColumn, scratchColumn, typeInfo); - bigTableOuterKeyMapping.add(batchKeyColumn, scratchColumn, typeInfo); + // For FULL OUTER MapJoin, we need to be able to deserialize a Small Table key + // into the output result. + fullOuterSmallTableKeyMapping.add(keyIndex, scratchColumn, typeInfo); } } else { @@ -3711,21 +3817,18 @@ private boolean canSpecializeMapJoin(Operator op, MapJoi smallTableExprVectorizes = false; } - bigTableRetainedNames[i] = smallTableExprNode.toString(); - TypeInfo typeInfo = smallTableExprNode.getTypeInfo(); // Make a new big table scratch column for the small table value. int scratchColumn = vContext.allocateScratchColumn(typeInfo); projectionMapping.add(nextOutputColumn, scratchColumn, typeInfo); - smallTableMapping.add(smallTableValueIndex, scratchColumn, typeInfo); + smallTableValueMapping.add(smallTableValueIndex, scratchColumn, typeInfo); } nextOutputColumn++; } } else if (smallTableRetainSize > 0) { smallTableOutputCount = smallTableRetainSize; - bigTableRetainedNames = new String[smallTableOutputCount]; // Only small table values appear in join output result. @@ -3738,21 +3841,24 @@ private boolean canSpecializeMapJoin(Operator op, MapJoi smallTableExprVectorizes = false; } - bigTableRetainedNames[i] = smallTableExprNode.toString(); - // Make a new big table scratch column for the small table value. TypeInfo typeInfo = smallTableExprNode.getTypeInfo(); int scratchColumn = vContext.allocateScratchColumn(typeInfo); projectionMapping.add(nextOutputColumn, scratchColumn, typeInfo); - smallTableMapping.add(smallTableValueIndex, scratchColumn, typeInfo); + smallTableValueMapping.add(smallTableValueIndex, scratchColumn, typeInfo); nextOutputColumn++; } - } else { - bigTableRetainedNames = new String[0]; } + Map> filterExpressions = desc.getFilters(); + VectorExpression[] bigTableFilterExpressions = + vContext.getVectorExpressions( + filterExpressions.get(posBigTable), + VectorExpressionDescriptor.Mode.FILTER); + vectorMapJoinInfo.setBigTableFilterExpressions(bigTableFilterExpressions); + boolean useOptimizedTable = HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEMAPJOINUSEOPTIMIZEDTABLE); @@ -3808,15 +3914,23 @@ private boolean canSpecializeMapJoin(Operator op, MapJoi // Convert dynamic arrays and maps to simple arrays. 
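// NOTE (editorial recap, inferred from the code above): the mappings finalized below play
// these roles -- bigTableRetainMapping copies retained Big Table columns into the overflow
// batch; nonOuterSmallTableKeyMapping stands a Big Table key in for the Small Table key in
// non-outer joins; outerSmallTableKeyMapping copies keys into scratch columns for OUTER;
// fullOuterSmallTableKeyMapping lets FULL OUTER deserialize Small Table keys into those
// same scratch columns; smallTableValueMapping targets the Small Table value columns.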
- bigTableRetainedMapping.finalize(); + bigTableRetainMapping.finalize(); + vectorMapJoinInfo.setBigTableRetainColumnMap(bigTableRetainMapping.getOutputColumns()); + vectorMapJoinInfo.setBigTableRetainTypeInfos(bigTableRetainMapping.getTypeInfos()); + + nonOuterSmallTableKeyMapping.finalize(); + vectorMapJoinInfo.setNonOuterSmallTableKeyColumnMap(nonOuterSmallTableKeyMapping.getOutputColumns()); + vectorMapJoinInfo.setNonOuterSmallTableKeyTypeInfos(nonOuterSmallTableKeyMapping.getTypeInfos()); + + outerSmallTableKeyMapping.finalize(); + fullOuterSmallTableKeyMapping.finalize(); - bigTableOuterKeyMapping.finalize(); + vectorMapJoinInfo.setOuterSmallTableKeyMapping(outerSmallTableKeyMapping); + vectorMapJoinInfo.setFullOuterSmallTableKeyMapping(fullOuterSmallTableKeyMapping); - smallTableMapping.finalize(); + smallTableValueMapping.finalize(); - vectorMapJoinInfo.setBigTableRetainedMapping(bigTableRetainedMapping); - vectorMapJoinInfo.setBigTableOuterKeyMapping(bigTableOuterKeyMapping); - vectorMapJoinInfo.setSmallTableMapping(smallTableMapping); + vectorMapJoinInfo.setSmallTableValueMapping(smallTableValueMapping); projectionMapping.finalize(); diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkMapJoinOptimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkMapJoinOptimizer.java index 689c88895a..ad6db21fe6 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkMapJoinOptimizer.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkMapJoinOptimizer.java @@ -459,6 +459,9 @@ public MapJoinOperator convertJoinMapJoin(JoinOperator joinOp, OptimizeSparkProc MapJoinProcessor.convertJoinOpMapJoinOp(context.getConf(), joinOp, joinOp.getConf().isLeftInputJoin(), joinOp.getConf().getBaseSrc(), joinOp.getConf().getMapAliases(), bigTablePosition, true); + if (mapJoinOp == null) { + return null; + } Operator parentBigTableOp = mapJoinOp.getParentOperators().get(bigTablePosition); diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java index 15958d5e8d..4c5695c68a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java @@ -1775,20 +1775,11 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, if (numAttr > 1 && conf.getBoolVar(HiveConf.ConfVars.HIVE_STATS_CORRELATED_MULTI_KEY_JOINS)) { denom = Collections.max(distinctVals); denomUnmatched = denom - ndvsUnmatched.get(distinctVals.indexOf(denom)); - } else if (numAttr > numParent) { + } else { // To avoid denominator getting larger and aggressively reducing // number of rows, we will ease out denominator. 
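// NOTE (editorial, assuming StatsUtils.addWithExpDecay still implements the
// descending-sort exponential back-off found in Hive's StatsUtils): the NDVs are
// combined as ndv1 * ndv2^(1/2) * ndv3^(1/4) * ..., so for NDVs {1000, 100, 10} the
// denominator comes out to roughly 1000 * 10 * 1.78, about 17,800, rather than the
// 1,000,000 the plain product in the removed else-branch would produce.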
denom = StatsUtils.addWithExpDecay(distinctVals); denomUnmatched = denom - StatsUtils.addWithExpDecay(ndvsUnmatched); - } else { - for (Long l : distinctVals) { - denom = StatsUtils.safeMult(denom, l); - } - long tempDenom = 1; - for (Long l : ndvsUnmatched) { - tempDenom = StatsUtils.safeMult(tempDenom, l); - } - denomUnmatched = denom - tempDenom; } } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index f24add311d..3309b9b5c8 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -1332,13 +1332,8 @@ private static void checkColumnName(String columnName) throws SemanticException ASTNode child = (ASTNode) ast.getChild(i); if (child.getToken().getType() == HiveParser.TOK_TABSORTCOLNAMEASC) { child = (ASTNode) child.getChild(0); - if (child.getToken().getType() == HiveParser.TOK_NULLS_FIRST) { - colList.add(new Order(unescapeIdentifier(child.getChild(0).getText()).toLowerCase(), - HIVE_COLUMN_ORDER_ASC)); - } else { - throw new SemanticException("create/alter table: " - + "not supported NULLS LAST for ORDER BY in ASC order"); - } + colList.add(new Order(unescapeIdentifier(child.getChild(0).getText()).toLowerCase(), + HIVE_COLUMN_ORDER_ASC)); } else { child = (ASTNode) child.getChild(0); if (child.getToken().getType() == HiveParser.TOK_NULLS_LAST) { diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java index 39f27b10eb..9e6de0520d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java @@ -1441,12 +1441,14 @@ RelNode logicalPlan() throws SemanticException { * @return Optimized SQL text (or null, if failed) */ public String getOptimizedSql(RelNode optimizedOptiqPlan) { + boolean nullsLast = HiveConf.getBoolVar(conf, ConfVars.HIVE_DEFAULT_NULLS_LAST); + NullCollation nullCollation = nullsLast ? 
NullCollation.LAST : NullCollation.LOW; SqlDialect dialect = new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT .withDatabaseProduct(SqlDialect.DatabaseProduct.HIVE) .withDatabaseMajorVersion(4) // TODO: should not be hardcoded .withDatabaseMinorVersion(0) .withIdentifierQuoteString("`") - .withNullCollation(NullCollation.LOW)) { + .withNullCollation(nullCollation)) { @Override protected boolean allowsAs() { return true; @@ -1894,6 +1896,7 @@ public RelNode apply(RelOptCluster cluster, RelOptSchema relOptSchema, SchemaPlu perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); calciteOptimizedPlan = hepPlan(calciteOptimizedPlan, false, mdProvider.getMetadataProvider(), null, HepMatchOrder.BOTTOM_UP, + HiveDruidRules.FILTER_DATE_RANGE_RULE, HiveDruidRules.FILTER, HiveDruidRules.PROJECT_FILTER_TRANSPOSE, HiveDruidRules.AGGREGATE_FILTER_TRANSPOSE, HiveDruidRules.AGGREGATE_PROJECT, diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainConfiguration.java ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainConfiguration.java index 2f12bda535..a92502e746 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainConfiguration.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainConfiguration.java @@ -49,6 +49,7 @@ private VectorizationDetailLevel vectorizationDetailLevel = VectorizationDetailLevel.SUMMARY; private boolean locks = false; private boolean ast = false; + private boolean debug = false; private Path explainRootPath; private Map opIdToRuntimeNumRows; @@ -139,6 +140,14 @@ public void setVectorizationDetailLevel(VectorizationDetailLevel vectorizationDe this.vectorizationDetailLevel = vectorizationDetailLevel; } + public boolean isDebug() { + return debug; + } + + public void setDebug(boolean debug) { + this.debug = debug; + } + public Path getExplainRootPath() { return explainRootPath; } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java index b17efffd86..428a3c90bb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java @@ -116,6 +116,8 @@ public void analyzeInternal(ASTNode ast) throws SemanticException { config.setLocks(true); } else if (explainOptions == HiveParser.KW_AST){ config.setAst(true); + } else if (explainOptions == HiveParser.KW_DEBUG) { + config.setDebug(true); } else { // UNDONE: UNKNOWN OPTION? 
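// NOTE (editorial, hypothetical usage sketch): with KW_DEBUG added to HiveLexer.g and to
// the explainOption rule below, a statement such as
//   EXPLAIN DEBUG SELECT ...
// takes the KW_DEBUG branch above, sets config.setDebug(true), and thereby exposes the
// @Explain(... explainLevels = { Level.DEBUG }) getters added elsewhere in this patch.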
} diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g index 807f6f593e..3caa51f899 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g +++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g @@ -189,6 +189,7 @@ KW_FILE: 'FILE'; KW_JAR: 'JAR'; KW_EXPLAIN: 'EXPLAIN'; KW_EXTENDED: 'EXTENDED'; +KW_DEBUG: 'DEBUG'; KW_FORMATTED: 'FORMATTED'; KW_DEPENDENCY: 'DEPENDENCY'; KW_LOGICAL: 'LOGICAL'; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g index 98471a7520..48f73031a7 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g +++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g @@ -569,6 +569,7 @@ import org.apache.hadoop.hive.conf.HiveConf; xlateMap.put("KW_FUNCTION", "FUNCTION"); xlateMap.put("KW_EXPLAIN", "EXPLAIN"); xlateMap.put("KW_EXTENDED", "EXTENDED"); + xlateMap.put("KW_DEBUG", "DEBUG"); xlateMap.put("KW_SERDE", "SERDE"); xlateMap.put("KW_WITH", "WITH"); xlateMap.put("KW_SERDEPROPERTIES", "SERDEPROPERTIES"); @@ -766,6 +767,12 @@ import org.apache.hadoop.hive.conf.HiveConf; public void setHiveConf(Configuration hiveConf) { this.hiveConf = hiveConf; } + protected boolean nullsLast() { + if(hiveConf == null){ + return false; + } + return HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_DEFAULT_NULLS_LAST); + } } @rulecatch { @@ -804,6 +811,7 @@ explainOption | KW_LOCKS | KW_AST | (KW_VECTORIZATION vectorizationOnly? vectorizatonDetail?) + | KW_DEBUG ; vectorizationOnly @@ -2367,7 +2375,9 @@ columnNameOrder @init { pushMsg("column name order", state); } @after { popMsg(state); } : identifier orderSpec=orderSpecification? nullSpec=nullOrdering? - -> {$orderSpec.tree == null && $nullSpec.tree == null}? + -> {$orderSpec.tree == null && $nullSpec.tree == null && nullsLast()}? + ^(TOK_TABSORTCOLNAMEASC ^(TOK_NULLS_LAST identifier)) + -> {$orderSpec.tree == null && $nullSpec.tree == null && !nullsLast()}? ^(TOK_TABSORTCOLNAMEASC ^(TOK_NULLS_FIRST identifier)) -> {$orderSpec.tree == null}? ^(TOK_TABSORTCOLNAMEASC ^($nullSpec identifier)) @@ -2397,7 +2407,9 @@ columnRefOrder @init { pushMsg("column order", state); } @after { popMsg(state); } : expression orderSpec=orderSpecification? nullSpec=nullOrdering? - -> {$orderSpec.tree == null && $nullSpec.tree == null}? + -> {$orderSpec.tree == null && $nullSpec.tree == null && nullsLast()}? + ^(TOK_TABSORTCOLNAMEASC ^(TOK_NULLS_LAST expression)) + -> {$orderSpec.tree == null && $nullSpec.tree == null && !nullsLast()}? ^(TOK_TABSORTCOLNAMEASC ^(TOK_NULLS_FIRST expression)) -> {$orderSpec.tree == null}? 
^(TOK_TABSORTCOLNAMEASC ^($nullSpec expression)) diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g index f327a121d8..f9c97e025d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g +++ ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g @@ -831,6 +831,7 @@ nonReserved | KW_OPERATOR | KW_EXPRESSION | KW_DETAIL + | KW_DEBUG | KW_WAIT | KW_ZONE | KW_TIMESTAMPTZ diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddPartitionHandler.java ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddPartitionHandler.java index cf159051a9..973a65b7b1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddPartitionHandler.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AddPartitionHandler.java @@ -85,14 +85,19 @@ public void handle(Context withinContext) throws Exception { withinContext.hiveConf); Iterator partitionFilesIter = apm.getPartitionFilesIter().iterator(); - for (Partition qlPtn : qlPtns) { - Iterable files = partitionFilesIter.next().getFiles(); - if (files != null) { - // encoded filename/checksum of files, write into _files - try (BufferedWriter fileListWriter = writer(withinContext, qlPtn)) { - for (String file : files) { - fileListWriter.write(file); - fileListWriter.newLine(); + + // We expect one to one mapping between partitions and file iterators. For external table, this + // list would be empty. So, it is enough to check hasNext outside the loop. + if (partitionFilesIter.hasNext()) { + for (Partition qlPtn : qlPtns) { + Iterable files = partitionFilesIter.next().getFiles(); + if (files != null) { + // encoded filename/checksum of files, write into _files + try (BufferedWriter fileListWriter = writer(withinContext, qlPtn)) { + for (String file : files) { + fileListWriter.write(file); + fileListWriter.newLine(); + } } } } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/Explain.java ql/src/java/org/apache/hadoop/hive/ql/plan/Explain.java index da302434a0..030bb6152d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/Explain.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/Explain.java @@ -28,7 +28,7 @@ @Retention(RetentionPolicy.RUNTIME) public @interface Explain { public enum Level { - USER, DEFAULT, EXTENDED; + USER, DEFAULT, EXTENDED, DEBUG; public boolean in(Level[] levels) { for (Level level : levels) { if (level.equals(this)) { diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java index 0654f57e74..01da4d558d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java @@ -165,6 +165,10 @@ public VectorizationDetailLevel isVectorizationDetailLevel() { return config.getVectorizationDetailLevel(); } + public boolean isDebug() { + return config.isDebug(); + } + public ParseContext getParseContext() { return pCtx; } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/JoinCondDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/JoinCondDesc.java index 3820d57f0f..0eb03e98a3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/JoinCondDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/JoinCondDesc.java @@ -126,7 +126,7 @@ public String getJoinCondString() { sb.append("Inner Join "); break; case JoinDesc.FULL_OUTER_JOIN: - sb.append("Outer Join "); + sb.append("Full Outer Join "); break; case JoinDesc.LEFT_OUTER_JOIN: 
sb.append("Left Outer Join "); diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java index 4313a6b440..2c93c2a760 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java @@ -291,7 +291,7 @@ public void setExprs(final Map> exprs) { first = false; sb.append("{"); - sb.append(expr.getExprString()); + sb.append(expr == null ? "NULL" : expr.getExprString()); sb.append("}"); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java index dc4f085203..8ba5101326 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java @@ -33,10 +33,16 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.MemoryMonitorInfo; +import org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectSerDeContext; +import org.apache.hadoop.hive.ql.optimizer.signature.Signature; import org.apache.hadoop.hive.ql.plan.Explain.Level; import org.apache.hadoop.hive.ql.plan.Explain.Vectorization; import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableImplementationType; import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.VectorMapJoinVariation; +import org.apache.hadoop.hive.serde2.AbstractSerDe; +import org.apache.hadoop.hive.serde2.SerDeException; +import org.apache.hadoop.hive.serde2.SerDeUtils; +import org.apache.hive.common.util.ReflectionUtil; /** * Map Join operator Descriptor implementation. @@ -91,6 +97,7 @@ public MapJoinDesc(MapJoinDesc clone) { this.keys = clone.keys; this.keyTblDesc = clone.keyTblDesc; this.valueTblDescs = clone.valueTblDescs; + this.valueFilteredTblDescs = clone.valueFilteredTblDescs; this.posBigTable = clone.posBigTable; this.valueIndices = clone.valueIndices; this.retainList = clone.retainList; @@ -208,6 +215,16 @@ public void setDumpFilePrefix(String dumpFilePrefix) { this.dumpFilePrefix = dumpFilePrefix; } + // NOTE: Debugging only. + @Explain(displayName = "keyExpressions", explainLevels = { Level.DEBUG }) + public Map getKeyExpressionString() { + Map keyMap = new LinkedHashMap(); + for (Map.Entry> k: getKeys().entrySet()) { + keyMap.put(k.getKey(), k.getValue().toString()); + } + return keyMap; + } + /** * @return the keys in string form */ @@ -292,6 +309,60 @@ public void setValueFilteredTblDescs(List valueFilteredTblDescs) { return valueTblDescs; } + // NOTE: Debugging only. + @Explain(displayName = "keyContext", explainLevels = { Level.DEBUG }) + public String getDebugKeyContext() { + MapJoinObjectSerDeContext keyContext; + try { + AbstractSerDe keySerde = + (AbstractSerDe) ReflectionUtil.newInstance( + keyTblDesc.getDeserializerClass(), null); + SerDeUtils.initializeSerDe(keySerde, null, keyTblDesc.getProperties(), null); + keyContext = new MapJoinObjectSerDeContext(keySerde, false); + } catch (SerDeException e) { + return null; + } + return keyContext.stringify(); + } + + private boolean hasFilter(int alias, int[][] filterMaps) { + return filterMaps != null && filterMaps[alias] != null; + } + + // NOTE: Debugging only. + @Explain(displayName = "valueContexts", explainLevels = { Level.DEBUG }) + public String getDebugValueContext() { + List valueContextStringList = new ArrayList(); + try { + boolean noOuterJoin = getNoOuterJoin(); + // Order in which the results should be output. 
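// NOTE (editorial, inferred from the hasFilter(...) flag passed to
// MapJoinObjectSerDeContext below): for outer joins the small-table values are
// serialized with a filter tag appended, which appears to be why the loop picks
// valueFilteredTblDescs rather than valueTblDescs when getNoOuterJoin() is false.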
+ Byte[] order = getTagOrder(); + int[][] filterMaps = getFilterMap(); + + for (int pos = 0; pos < order.length; pos++) { + if (pos == posBigTable) { + continue; + } + TableDesc valueTableDesc; + if (noOuterJoin) { + valueTableDesc = getValueTblDescs().get(pos); + } else { + valueTableDesc = getValueFilteredTblDescs().get(pos); + } + AbstractSerDe valueSerDe = + (AbstractSerDe) ReflectionUtil.newInstance( + valueTableDesc.getDeserializerClass(), null); + SerDeUtils.initializeSerDe(valueSerDe, null, valueTableDesc.getProperties(), null); + MapJoinObjectSerDeContext valueContext = + new MapJoinObjectSerDeContext(valueSerDe, hasFilter(pos, filterMaps)); + valueContextStringList.add(pos + ":" + valueContext.stringify()); + } + } catch (SerDeException e) { + return null; + } + return valueContextStringList.toString(); + } + /** * @param valueTblDescs * the valueTblDescs to set @@ -378,6 +449,8 @@ public boolean getGenJoinKeys() { return genJoinKeys; } + @Explain(displayName = "DynamicPartitionHashJoin", explainLevels = { Level.USER, Level.DEFAULT, + Level.EXTENDED }, displayOnlyOnTrue = true) public boolean isDynamicPartitionHashJoin() { return isDynamicPartitionHashJoin; } @@ -386,6 +459,25 @@ public void setDynamicPartitionHashJoin(boolean isDistributedHashJoin) { this.isDynamicPartitionHashJoin = isDistributedHashJoin; } + // NOTE: Debugging only. + @Explain(displayName = "outer filter mappings", explainLevels = { Level.DEBUG }) + public String getDebugOuterFilterMapString() { + if (conds.length != 1) { + return null; + } + JoinCondDesc cond = conds[0]; + if (cond.getType() != JoinDesc.FULL_OUTER_JOIN && + cond.getType() != JoinDesc.LEFT_OUTER_JOIN && + cond.getType() != JoinDesc.RIGHT_OUTER_JOIN) { + return null; + } + int[][] fm = getFilterMap(); + if (fm == null) { + return null; + } + return Arrays.deepToString(fm); + } + // Use LinkedHashSet to give predictable display order. private static final Set vectorizableMapJoinNativeEngines = new LinkedHashSet(Arrays.asList("tez", "spark")); @@ -401,7 +493,9 @@ public void setDynamicPartitionHashJoin(boolean isDistributedHashJoin) { public MapJoinOperatorExplainVectorization(MapJoinDesc mapJoinDesc, VectorMapJoinDesc vectorMapJoinDesc) { // VectorMapJoinOperator is not native vectorized. 
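// NOTE (editorial): "native" here means a specialized vectorized MapJoin implementation
// was selected, i.e. hashTableImplementationType != NONE, as the super(...) call below
// encodes; the row-mode bridge VectorMapJoinOperator is deliberately reported as not
// native.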
- super(vectorMapJoinDesc, vectorMapJoinDesc.getHashTableImplementationType() != HashTableImplementationType.NONE); + super( + vectorMapJoinDesc, + vectorMapJoinDesc.getHashTableImplementationType() != HashTableImplementationType.NONE); this.mapJoinDesc = mapJoinDesc; this.vectorMapJoinDesc = vectorMapJoinDesc; vectorMapJoinInfo = @@ -414,7 +508,8 @@ public MapJoinOperatorExplainVectorization(MapJoinDesc mapJoinDesc, String engine = vectorMapJoinDesc.getEngine(); String engineInSupportedCondName = - HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname + " " + engine + " IN " + vectorizableMapJoinNativeEngines; + HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname + " " + + engine + " IN " + vectorizableMapJoinNativeEngines; boolean engineInSupported = vectorizableMapJoinNativeEngines.contains(engine); boolean isFastHashTableEnabled = vectorMapJoinDesc.getIsFastHashTableEnabled(); @@ -469,7 +564,8 @@ public MapJoinOperatorExplainVectorization(MapJoinDesc mapJoinDesc, return conditions; } - @Explain(vectorization = Vectorization.OPERATOR, displayName = "nativeConditionsMet", explainLevels = { Level.DEFAULT, Level.EXTENDED }) + @Explain(vectorization = Vectorization.OPERATOR, displayName = "nativeConditionsMet", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) public List getNativeConditionsMet() { if (nativeConditions == null) { nativeConditions = createNativeConditions(); @@ -477,7 +573,8 @@ public MapJoinOperatorExplainVectorization(MapJoinDesc mapJoinDesc, return VectorizationCondition.getConditionsMet(nativeConditions); } - @Explain(vectorization = Vectorization.OPERATOR, displayName = "nativeConditionsNotMet", explainLevels = { Level.DEFAULT, Level.EXTENDED }) + @Explain(vectorization = Vectorization.OPERATOR, displayName = "nativeConditionsNotMet", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) public List getNativeConditionsNotMet() { if (nativeConditions == null) { nativeConditions = createNativeConditions(); @@ -485,7 +582,8 @@ public MapJoinOperatorExplainVectorization(MapJoinDesc mapJoinDesc, return VectorizationCondition.getConditionsNotMet(nativeConditions); } - @Explain(vectorization = Vectorization.EXPRESSION, displayName = "bigTableKeyExpressions", explainLevels = { Level.DEFAULT, Level.EXTENDED }) + @Explain(vectorization = Vectorization.OPERATOR, displayName = "bigTableKeyExpressions", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) public List getBigTableKeyExpressions() { return vectorExpressionsToStringList( isNative ? 
@@ -493,8 +591,18 @@ public MapJoinOperatorExplainVectorization(MapJoinDesc mapJoinDesc, vectorMapJoinDesc.getAllBigTableKeyExpressions()); } - @Explain(vectorization = Vectorization.DETAIL, displayName = "bigTableKeyColumnNums", explainLevels = { Level.DEFAULT, Level.EXTENDED }) - public String getBigTableKeyColumnNums() { + @Explain(vectorization = Vectorization.EXPRESSION, displayName = "hashTableImplementationType", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) + public String hashTableImplementationType() { + if (!isNative) { + return null; + } + return vectorMapJoinDesc.getHashTableImplementationType().name(); + } + + @Explain(vectorization = Vectorization.DETAIL, displayName = "bigTableKeyColumns", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) + public List getBigTableKeyColumns() { if (!isNative) { return null; } @@ -502,10 +610,13 @@ public String getBigTableKeyColumnNums() { if (bigTableKeyColumnMap.length == 0) { return null; } - return Arrays.toString(bigTableKeyColumnMap); + return outputColumnsAndTypesToStringList( + vectorMapJoinInfo.getBigTableKeyColumnMap(), + vectorMapJoinInfo.getBigTableKeyTypeInfos()); } - @Explain(vectorization = Vectorization.EXPRESSION, displayName = "bigTableValueExpressions", explainLevels = { Level.DEFAULT, Level.EXTENDED }) + @Explain(vectorization = Vectorization.EXPRESSION, displayName = "bigTableValueExpressions", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) public List getBigTableValueExpressions() { return vectorExpressionsToStringList( isNative ? @@ -513,8 +624,18 @@ public String getBigTableKeyColumnNums() { vectorMapJoinDesc.getAllBigTableValueExpressions()); } - @Explain(vectorization = Vectorization.DETAIL, displayName = "bigTableValueColumnNums", explainLevels = { Level.DEFAULT, Level.EXTENDED }) - public String getBigTableValueColumnNums() { + @Explain(vectorization = Vectorization.EXPRESSION, displayName = "bigTableFilterExpressions", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) + public List getBigTableFilterExpressions() { + if (!isNative) { + return null; + } + return vectorExpressionsToStringList(vectorMapJoinInfo.getBigTableFilterExpressions()); + } + + @Explain(vectorization = Vectorization.DETAIL, displayName = "bigTableValueColumns", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) + public List getBigTableValueColumns() { if (!isNative) { return null; } @@ -522,48 +643,78 @@ public String getBigTableValueColumnNums() { if (bigTableValueColumnMap.length == 0) { return null; } - return Arrays.toString(bigTableValueColumnMap); + return outputColumnsAndTypesToStringList( + vectorMapJoinInfo.getBigTableValueColumnMap(), + vectorMapJoinInfo.getBigTableValueTypeInfos()); } - @Explain(vectorization = Vectorization.DETAIL, displayName = "smallTableMapping", explainLevels = { Level.DEFAULT, Level.EXTENDED }) - public String getSmallTableColumns() { + @Explain(vectorization = Vectorization.DETAIL, displayName = "smallTableValueMapping", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) + public List getSmallTableColumns() { if (!isNative) { return null; } - return outputColumnsToStringList(vectorMapJoinInfo.getSmallTableMapping()); + return outputColumnsAndTypesToStringList(vectorMapJoinInfo.getSmallTableValueMapping()); } - @Explain(vectorization = Vectorization.DETAIL, displayName = "projectedOutputColumnNums", explainLevels = { Level.DEFAULT, Level.EXTENDED }) - public String getProjectedOutputColumnNums() { + @Explain(vectorization = Vectorization.DETAIL, displayName = "projectedOutput", + 
explainLevels = { Level.DEFAULT, Level.EXTENDED }) + public List getProjectedOutputColumnNums() { if (!isNative) { return null; } - return outputColumnsToStringList(vectorMapJoinInfo.getProjectionMapping()); + return outputColumnsAndTypesToStringList(vectorMapJoinInfo.getProjectionMapping()); } - @Explain(vectorization = Vectorization.DETAIL, displayName = "bigTableOuterKeyMapping", explainLevels = { Level.DEFAULT, Level.EXTENDED }) - public List getBigTableOuterKey() { - if (!isNative || vectorMapJoinDesc.getVectorMapJoinVariation() != VectorMapJoinVariation.OUTER) { + @Explain(vectorization = Vectorization.DETAIL, displayName = "bigTableRetainColumnNums", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) + public String getBigTableRetainedColumnNums() { + if (!isNative) { return null; } - return columnMappingToStringList(vectorMapJoinInfo.getBigTableOuterKeyMapping()); + return Arrays.toString(vectorMapJoinInfo.getBigTableRetainColumnMap()); } - @Explain(vectorization = Vectorization.DETAIL, displayName = "bigTableRetainedColumnNums", explainLevels = { Level.DEFAULT, Level.EXTENDED }) - public String getBigTableRetainedColumnNums() { - if (!isNative) { + @Explain(vectorization = Vectorization.DETAIL, displayName = "nonOuterSmallTableKeyMapping", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) + public String getNonOuterSmallTableKeyMapping() { + if (!isNative || + (vectorMapJoinDesc.getVectorMapJoinVariation() == VectorMapJoinVariation.OUTER || + vectorMapJoinDesc.getVectorMapJoinVariation() == VectorMapJoinVariation.FULL_OUTER)) { + return null; + } + return Arrays.toString(vectorMapJoinInfo.getNonOuterSmallTableKeyColumnMap()); + } + + @Explain(vectorization = Vectorization.DETAIL, displayName = "outerSmallTableKeyMapping", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) + public List getOuterSmallTableKeyMapping() { + if (!isNative || + vectorMapJoinDesc.getVectorMapJoinVariation() != VectorMapJoinVariation.OUTER) { + return null; + } + return columnMappingToStringList(vectorMapJoinInfo.getOuterSmallTableKeyMapping()); + } + + @Explain(vectorization = Vectorization.DETAIL, displayName = "fullOuterSmallTableKeyMapping", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) + public List getFullOuterSmallTableKeyMapping() { + if (!isNative || + vectorMapJoinDesc.getVectorMapJoinVariation() != VectorMapJoinVariation.FULL_OUTER) { return null; } - return outputColumnsToStringList(vectorMapJoinInfo.getBigTableRetainedMapping()); + return columnMappingToStringList(vectorMapJoinInfo.getFullOuterSmallTableKeyMapping()); } - @Explain(vectorization = Vectorization.OPERATOR, displayName = "nativeNotSupportedKeyTypes", explainLevels = { Level.DEFAULT, Level.EXTENDED }) + @Explain(vectorization = Vectorization.OPERATOR, displayName = "nativeNotSupportedKeyTypes", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) public List getNativeNotSupportedKeyTypes() { return vectorMapJoinDesc.getNotSupportedKeyTypes(); } } - @Explain(vectorization = Vectorization.OPERATOR, displayName = "Map Join Vectorization", explainLevels = { Level.DEFAULT, Level.EXTENDED }) + @Explain(vectorization = Vectorization.OPERATOR, displayName = "Map Join Vectorization", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) public MapJoinOperatorExplainVectorization getMapJoinVectorization() { VectorMapJoinDesc vectorMapJoinDesc = (VectorMapJoinDesc) getVectorDesc(); if (vectorMapJoinDesc == null || this instanceof SMBJoinDesc) { @@ -587,7 +738,8 @@ public SMBJoinOperatorExplainVectorization(SMBJoinDesc 
smbJoinDesc, } // Handle dual nature. - @Explain(vectorization = Vectorization.OPERATOR, displayName = "SMB Map Join Vectorization", explainLevels = { Level.DEFAULT, Level.EXTENDED }) + @Explain(vectorization = Vectorization.OPERATOR, displayName = "SMB Map Join Vectorization", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) public SMBJoinOperatorExplainVectorization getSMBJoinVectorization() { VectorSMBJoinDesc vectorSMBJoinDesc = (VectorSMBJoinDesc) getVectorDesc(); if (vectorSMBJoinDesc == null || !(this instanceof SMBJoinDesc)) { diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/MergeJoinWork.java ql/src/java/org/apache/hadoop/hive/ql/plan/MergeJoinWork.java index 45f33471df..dd907eff31 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/MergeJoinWork.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/MergeJoinWork.java @@ -29,7 +29,9 @@ import org.apache.hadoop.hive.ql.exec.HashTableDummyOperator; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator; +import org.apache.hadoop.hive.ql.plan.BaseWork.BaseExplainVectorization; import org.apache.hadoop.hive.ql.plan.Explain.Level; +import org.apache.hadoop.hive.ql.plan.Explain.Vectorization; import org.apache.hadoop.mapred.JobConf; public class MergeJoinWork extends BaseWork { @@ -180,4 +182,59 @@ public boolean getLlapMode() { public void addDummyOp(HashTableDummyOperator dummyOp) { getMainWork().addDummyOp(dummyOp); } + + /** + * For now, this class simply reports in EXPLAIN VECTORIZATION that vectorization of the + * Merge Join vertex is not supported, rather than staying silent about it. + */ + public class MergeJoinExplainVectorization extends BaseExplainVectorization { + + private final MergeJoinWork mergeJoinWork; + + private VectorizationCondition[] mergeWorkVectorizationConditions; + + public MergeJoinExplainVectorization(MergeJoinWork mergeJoinWork) { + super(mergeJoinWork); + this.mergeJoinWork = mergeJoinWork; + } + + private VectorizationCondition[] createMergeWorkExplainVectorizationConditions() { + + boolean enabled = false; + + VectorizationCondition[] conditions = new VectorizationCondition[] { + new VectorizationCondition( + enabled, + "Vectorizing MergeJoin Supported") + }; + return conditions; + } + + @Explain(vectorization = Vectorization.SUMMARY, displayName = "enableConditionsMet", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) + public List getEnableConditionsMet() { + if (mergeWorkVectorizationConditions == null) { + mergeWorkVectorizationConditions = createMergeWorkExplainVectorizationConditions(); + } + return VectorizationCondition.getConditionsMet(mergeWorkVectorizationConditions); + } + + @Explain(vectorization = Vectorization.SUMMARY, displayName = "enableConditionsNotMet", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) + public List getEnableConditionsNotMet() { + if (mergeWorkVectorizationConditions == null) { + mergeWorkVectorizationConditions = createMergeWorkExplainVectorizationConditions(); + } + return VectorizationCondition.getConditionsNotMet(mergeWorkVectorizationConditions); + } + } + + @Explain(vectorization = Vectorization.SUMMARY, displayName = "MergeJoin Vectorization", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) + public MergeJoinExplainVectorization getMergeJoinExplainVectorization() { + if (!getVectorizationExamined()) { + return null; + } + return new MergeJoinExplainVectorization(this); + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/OperatorExplainVectorization.java
ql/src/java/org/apache/hadoop/hive/ql/plan/OperatorExplainVectorization.java index 446b8102a8..5439e1462c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/OperatorExplainVectorization.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/OperatorExplainVectorization.java @@ -59,6 +59,25 @@ public String outputColumnsToStringList(VectorColumnMapping vectorColumnMapping) return Arrays.toString(outputColumns); } + public List outputColumnsAndTypesToStringList(int[] outputColumns, TypeInfo[] typeInfos) { + final int size = outputColumns.length; + ArrayList result = new ArrayList(size); + for (int i = 0; i < size; i++) { + result.add(outputColumns[i] + ":" + typeInfos[i].toString()); + } + return result; + } + + public List outputColumnsAndTypesToStringList(VectorColumnMapping vectorColumnMapping) { + final int size = vectorColumnMapping.getCount(); + if (size == 0) { + return null; + } + int[] outputColumns = vectorColumnMapping.getOutputColumns(); + TypeInfo[] typeInfos = vectorColumnMapping.getTypeInfos(); + return outputColumnsAndTypesToStringList(outputColumns, typeInfos); + } + public List columnMappingToStringList(VectorColumnMapping vectorColumnMapping) { final int size = vectorColumnMapping.getCount(); if (size == 0) { diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java index 1cd6b95f65..863a34eff7 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java @@ -110,7 +110,7 @@ public void initializeWindowing(WindowTableFunctionDef def) throws HiveException TableFunctionEvaluator tEval = def.getTFunction(); WindowingTableFunctionResolver tResolver = (WindowingTableFunctionResolver) constructResolver(def.getResolverClassName()); - tResolver.initialize(ptfDesc, def, tEval); + tResolver.initialize(hConf, ptfDesc, def, tEval); /* @@ -171,7 +171,7 @@ protected void initialize(PartitionedTableFunctionDef def) throws HiveException TableFunctionEvaluator tEval = def.getTFunction(); // TableFunctionResolver tResolver = FunctionRegistry.getTableFunctionResolver(def.getName()); TableFunctionResolver tResolver = constructResolver(def.getResolverClassName()); - tResolver.initialize(ptfDesc, def, tEval); + tResolver.initialize(hConf, ptfDesc, def, tEval); /* * 3. give Evaluator chance to setup for RawInput execution; setup RawInput shape diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java index 61216bc07a..b1d8e1feb1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java @@ -29,6 +29,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.optimizer.signature.Signature; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.plan.Explain.Level; import org.apache.hadoop.hive.ql.plan.Explain.Vectorization; import org.apache.hadoop.hive.ql.plan.VectorReduceSinkDesc.ReduceSinkKeyType; @@ -193,6 +194,17 @@ public Object clone() { return outputKeyColumnNames; } + // NOTE: Debugging only. + @Explain(displayName = "output key column names", explainLevels = { Level.DEBUG }) + public List getOutputKeyColumnNamesDisplay() { + List result = new ArrayList(); + for (String name : outputKeyColumnNames) { + result.add(Utilities.ReduceField.KEY.name() + "." 
+ name); + } + return result; + } + + public void setOutputKeyColumnNames( java.util.ArrayList outputKeyColumnNames) { this.outputKeyColumnNames = outputKeyColumnNames; @@ -202,6 +214,16 @@ public void setOutputKeyColumnNames( return outputValueColumnNames; } + // NOTE: Debugging only. + @Explain(displayName = "output value column names", explainLevels = { Level.DEBUG }) + public List getOutputValueColumnNamesDisplay() { + List result = new ArrayList(); + for (String name : outputValueColumnNames) { + result.add(Utilities.ReduceField.VALUE.name() + "." + name); + } + return result; + } + public void setOutputValueColumnNames( java.util.ArrayList outputValueColumnNames) { this.outputValueColumnNames = outputValueColumnNames; @@ -540,34 +562,41 @@ public ReduceSinkOperatorExplainVectorization(ReduceSinkDesc reduceSinkDesc, return vectorExpressionsToStringList(vectorReduceSinkInfo.getReduceSinkValueExpressions()); } - @Explain(vectorization = Vectorization.DETAIL, displayName = "keyColumnNums", explainLevels = { Level.DEFAULT, Level.EXTENDED }) - public String getKeyColumnNums() { + @Explain(vectorization = Vectorization.DETAIL, displayName = "keyColumns", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) + public List getKeyColumns() { if (!isNative) { return null; } int[] keyColumnMap = vectorReduceSinkInfo.getReduceSinkKeyColumnMap(); if (keyColumnMap == null) { // Always show an array. - keyColumnMap = new int[0]; + return new ArrayList(); } - return Arrays.toString(keyColumnMap); + return outputColumnsAndTypesToStringList( + vectorReduceSinkInfo.getReduceSinkKeyColumnMap(), + vectorReduceSinkInfo.getReduceSinkKeyTypeInfos()); } - @Explain(vectorization = Vectorization.DETAIL, displayName = "valueColumnNums", explainLevels = { Level.DEFAULT, Level.EXTENDED }) - public String getValueColumnNums() { + @Explain(vectorization = Vectorization.DETAIL, displayName = "valueColumns", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) + public List getValueColumns() { if (!isNative) { return null; } int[] valueColumnMap = vectorReduceSinkInfo.getReduceSinkValueColumnMap(); if (valueColumnMap == null) { // Always show an array. - valueColumnMap = new int[0]; + return new ArrayList(); } - return Arrays.toString(valueColumnMap); + return outputColumnsAndTypesToStringList( + vectorReduceSinkInfo.getReduceSinkValueColumnMap(), + vectorReduceSinkInfo.getReduceSinkValueTypeInfos()); } - @Explain(vectorization = Vectorization.DETAIL, displayName = "bucketColumnNums", explainLevels = { Level.DEFAULT, Level.EXTENDED }) - public String getBucketColumnNums() { + @Explain(vectorization = Vectorization.DETAIL, displayName = "bucketColumns", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) + public List getBucketColumns() { if (!isNative) { return null; } @@ -576,11 +605,14 @@ public String getBucketColumnNums() { // Suppress empty column map. 
return null; } - return Arrays.toString(bucketColumnMap); + return outputColumnsAndTypesToStringList( + vectorReduceSinkInfo.getReduceSinkBucketColumnMap(), + vectorReduceSinkInfo.getReduceSinkBucketTypeInfos()); } - @Explain(vectorization = Vectorization.DETAIL, displayName = "partitionColumnNums", explainLevels = { Level.DEFAULT, Level.EXTENDED }) - public String getPartitionColumnNums() { + @Explain(vectorization = Vectorization.DETAIL, displayName = "partitionColumns", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) + public List getPartitionColumns() { if (!isNative) { return null; } @@ -589,7 +621,9 @@ public String getPartitionColumnNums() { // Suppress empty column map. return null; } - return Arrays.toString(partitionColumnMap); + return outputColumnsAndTypesToStringList( + vectorReduceSinkInfo.getReduceSinkPartitionColumnMap(), + vectorReduceSinkInfo.getReduceSinkPartitionTypeInfos()); } private VectorizationCondition[] createNativeConditions() { @@ -598,7 +632,8 @@ public String getPartitionColumnNums() { String engine = vectorReduceSinkDesc.getEngine(); String engineInSupportedCondName = - HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname + " " + engine + " IN " + vectorizableReduceSinkNativeEngines; + HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname + " " + engine + " IN " + + vectorizableReduceSinkNativeEngines; boolean engineInSupported = vectorizableReduceSinkNativeEngines.contains(engine); VectorizationCondition[] conditions = new VectorizationCondition[] { @@ -633,7 +668,8 @@ public String getPartitionColumnNums() { return conditions; } - @Explain(vectorization = Vectorization.OPERATOR, displayName = "nativeConditionsMet", explainLevels = { Level.DEFAULT, Level.EXTENDED }) + @Explain(vectorization = Vectorization.OPERATOR, displayName = "nativeConditionsMet", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) public List getNativeConditionsMet() { if (nativeConditions == null) { nativeConditions = createNativeConditions(); @@ -641,7 +677,8 @@ public String getPartitionColumnNums() { return VectorizationCondition.getConditionsMet(nativeConditions); } - @Explain(vectorization = Vectorization.OPERATOR, displayName = "nativeConditionsNotMet", explainLevels = { Level.DEFAULT, Level.EXTENDED }) + @Explain(vectorization = Vectorization.OPERATOR, displayName = "nativeConditionsNotMet", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) public List getNativeConditionsNotMet() { if (nativeConditions == null) { nativeConditions = createNativeConditions(); @@ -650,7 +687,8 @@ public String getPartitionColumnNums() { } } - @Explain(vectorization = Vectorization.OPERATOR, displayName = "Reduce Sink Vectorization", explainLevels = { Level.DEFAULT, Level.EXTENDED }) + @Explain(vectorization = Vectorization.OPERATOR, displayName = "Reduce Sink Vectorization", + explainLevels = { Level.DEFAULT, Level.EXTENDED }) public ReduceSinkOperatorExplainVectorization getReduceSinkVectorization() { VectorReduceSinkDesc vectorReduceSinkDesc = (VectorReduceSinkDesc) getVectorDesc(); if (vectorReduceSinkDesc == null) { diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/VectorMapJoinDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/VectorMapJoinDesc.java index 58032ca057..89a07b4ee3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/VectorMapJoinDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/VectorMapJoinDesc.java @@ -85,11 +85,11 @@ public PrimitiveTypeInfo getPrimitiveTypeInfo() { } public static enum VectorMapJoinVariation { - NONE, - INNER_BIG_ONLY, INNER, + INNER_BIG_ONLY, 
LEFT_SEMI, - OUTER + OUTER, + FULL_OUTER } private HashTableImplementationType hashTableImplementationType; @@ -107,7 +107,7 @@ public VectorMapJoinDesc() { hashTableImplementationType = HashTableImplementationType.NONE; hashTableKind = HashTableKind.NONE; hashTableKeyType = HashTableKeyType.NONE; - vectorMapJoinVariation = VectorMapJoinVariation.NONE; + vectorMapJoinVariation = null; minMaxEnabled = false; allBigTableKeyExpressions = null; @@ -206,6 +206,7 @@ public VectorMapJoinInfo getVectorMapJoinInfo() { private List notSupportedKeyTypes; private boolean smallTableExprVectorizes; private boolean outerJoinHasNoKeys; + boolean isFullOuter; public void setUseOptimizedTable(boolean useOptimizedTable) { this.useOptimizedTable = useOptimizedTable; @@ -274,5 +275,10 @@ public void setIsHybridHashJoin(boolean isHybridHashJoin) { public boolean getIsHybridHashJoin() { return isHybridHashJoin; } - + public void setIsFullOuter(boolean isFullOuter) { + this.isFullOuter = isFullOuter; + } + public boolean getIsFullOuter() { + return isFullOuter; + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/VectorMapJoinInfo.java ql/src/java/org/apache/hadoop/hive/ql/plan/VectorMapJoinInfo.java index 6db0540bea..ad82e5cc9f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/VectorMapJoinInfo.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/VectorMapJoinInfo.java @@ -48,9 +48,19 @@ private TypeInfo[] bigTableValueTypeInfos; private VectorExpression[] slimmedBigTableValueExpressions; - private VectorColumnOutputMapping bigTableRetainedMapping; - private VectorColumnOutputMapping bigTableOuterKeyMapping; - private VectorColumnSourceMapping smallTableMapping; + private VectorExpression[] bigTableFilterExpressions; + + private int[] bigTableRetainColumnMap; + private TypeInfo[] bigTableRetainTypeInfos; + + private int[] nonOuterSmallTableKeyColumnMap; + private TypeInfo[] nonOuterSmallTableKeyTypeInfos; + + private VectorColumnOutputMapping outerSmallTableKeyMapping; + + private VectorColumnSourceMapping fullOuterSmallTableKeyMapping; + + private VectorColumnSourceMapping smallTableValueMapping; private VectorColumnSourceMapping projectionMapping; @@ -65,9 +75,19 @@ public VectorMapJoinInfo() { bigTableValueTypeInfos = null; slimmedBigTableValueExpressions = null; - bigTableRetainedMapping = null; - bigTableOuterKeyMapping = null; - smallTableMapping = null; + bigTableFilterExpressions = null; + + bigTableRetainColumnMap = null; + bigTableRetainTypeInfos = null; + + nonOuterSmallTableKeyColumnMap = null; + nonOuterSmallTableKeyTypeInfos = null; + + outerSmallTableKeyMapping = null; + + fullOuterSmallTableKeyMapping = null; + + smallTableValueMapping = null; projectionMapping = null; } @@ -138,28 +158,69 @@ public void setSlimmedBigTableValueExpressions( this.slimmedBigTableValueExpressions = slimmedBigTableValueExpressions; } - public void setBigTableRetainedMapping(VectorColumnOutputMapping bigTableRetainedMapping) { - this.bigTableRetainedMapping = bigTableRetainedMapping; + public VectorExpression[] getBigTableFilterExpressions() { + return bigTableFilterExpressions; + } + + public void setBigTableFilterExpressions(VectorExpression[] bigTableFilterExpressions) { + this.bigTableFilterExpressions = bigTableFilterExpressions; + } + + public void setBigTableRetainColumnMap(int[] bigTableRetainColumnMap) { + this.bigTableRetainColumnMap = bigTableRetainColumnMap; + } + + public int[] getBigTableRetainColumnMap() { + return bigTableRetainColumnMap; + } + + public void 
setBigTableRetainTypeInfos(TypeInfo[] bigTableRetainTypeInfos) { + this.bigTableRetainTypeInfos = bigTableRetainTypeInfos; + } + + public TypeInfo[] getBigTableRetainTypeInfos() { + return bigTableRetainTypeInfos; + } + + public void setNonOuterSmallTableKeyColumnMap(int[] nonOuterSmallTableKeyColumnMap) { + this.nonOuterSmallTableKeyColumnMap = nonOuterSmallTableKeyColumnMap; + } + + public int[] getNonOuterSmallTableKeyColumnMap() { + return nonOuterSmallTableKeyColumnMap; + } + + public void setNonOuterSmallTableKeyTypeInfos(TypeInfo[] nonOuterSmallTableKeyTypeInfos) { + this.nonOuterSmallTableKeyTypeInfos = nonOuterSmallTableKeyTypeInfos; + } + + public TypeInfo[] getNonOuterSmallTableKeyTypeInfos() { + return nonOuterSmallTableKeyTypeInfos; + } + + public void setOuterSmallTableKeyMapping(VectorColumnOutputMapping outerSmallTableKeyMapping) { + this.outerSmallTableKeyMapping = outerSmallTableKeyMapping; } - public VectorColumnOutputMapping getBigTableRetainedMapping() { - return bigTableRetainedMapping; + public VectorColumnOutputMapping getOuterSmallTableKeyMapping() { + return outerSmallTableKeyMapping; } - public void setBigTableOuterKeyMapping(VectorColumnOutputMapping bigTableOuterKeyMapping) { - this.bigTableOuterKeyMapping = bigTableOuterKeyMapping; + public void setFullOuterSmallTableKeyMapping( + VectorColumnSourceMapping fullOuterSmallTableKeyMapping) { + this.fullOuterSmallTableKeyMapping = fullOuterSmallTableKeyMapping; } - public VectorColumnOutputMapping getBigTableOuterKeyMapping() { - return bigTableOuterKeyMapping; + public VectorColumnSourceMapping getFullOuterSmallTableKeyMapping() { + return fullOuterSmallTableKeyMapping; } - public void setSmallTableMapping(VectorColumnSourceMapping smallTableMapping) { - this.smallTableMapping = smallTableMapping; + public void setSmallTableValueMapping(VectorColumnSourceMapping smallTableValueMapping) { + this.smallTableValueMapping = smallTableValueMapping; } - public VectorColumnSourceMapping getSmallTableMapping() { - return smallTableMapping; + public VectorColumnSourceMapping getSmallTableValueMapping() { + return smallTableValueMapping; } public void setProjectionMapping(VectorColumnSourceMapping projectionMapping) { diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java index 5cb7061868..807eca92f1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java @@ -236,11 +236,13 @@ protected BasePartitionEvaluator createPartitionEvaluator( WindowFrameDef winFrame, PTFPartition partition, List parameters, - ObjectInspector outputOI) { + ObjectInspector outputOI, + boolean nullsLast) { try { - return new BasePartitionEvaluator.AvgPartitionDoubleEvaluator(this, winFrame, partition, parameters, inputOI, outputOI); + return new BasePartitionEvaluator.AvgPartitionDoubleEvaluator(this, winFrame, partition, + parameters, inputOI, outputOI, nullsLast); } catch(HiveException e) { - return super.createPartitionEvaluator(winFrame, partition, parameters, outputOI); + return super.createPartitionEvaluator(winFrame, partition, parameters, outputOI, nullsLast); } } } @@ -414,11 +416,13 @@ protected BasePartitionEvaluator createPartitionEvaluator( WindowFrameDef winFrame, PTFPartition partition, List parameters, - ObjectInspector outputOI) { + ObjectInspector outputOI, + boolean nullsLast) { try { - return new 
BasePartitionEvaluator.AvgPartitionHiveDecimalEvaluator(this, winFrame, partition, parameters, inputOI, outputOI); + return new BasePartitionEvaluator.AvgPartitionHiveDecimalEvaluator(this, winFrame, + partition, parameters, inputOI, outputOI, nullsLast); } catch(HiveException e) { - return super.createPartitionEvaluator(winFrame, partition, parameters, outputOI); + return super.createPartitionEvaluator(winFrame, partition, parameters, outputOI, nullsLast); } } } diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java index b02ca0708b..960d8fdb89 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java @@ -305,6 +305,7 @@ public String getExprString() { * @param partition the partition data * @param parameters the list of the expressions in the function * @param outputOI the output object inspector + * @param nullsLast the nulls last configuration * @return the evaluator, default to BasePartitionEvaluator which * implements the naive approach */ @@ -312,9 +313,10 @@ public final BasePartitionEvaluator getPartitionWindowingEvaluator( WindowFrameDef winFrame, PTFPartition partition, List parameters, - ObjectInspector outputOI) { + ObjectInspector outputOI, boolean nullsLast) { if (partitionEvaluator == null) { - partitionEvaluator = createPartitionEvaluator(winFrame, partition, parameters, outputOI); + partitionEvaluator = createPartitionEvaluator(winFrame, partition, parameters, outputOI, + nullsLast); } return partitionEvaluator; @@ -328,7 +330,8 @@ protected BasePartitionEvaluator createPartitionEvaluator( WindowFrameDef winFrame, PTFPartition partition, List parameters, - ObjectInspector outputOI) { - return new BasePartitionEvaluator(this, winFrame, partition, parameters, outputOI); + ObjectInspector outputOI, + boolean nullsLast) { + return new BasePartitionEvaluator(this, winFrame, partition, parameters, outputOI, nullsLast); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java index e30b903496..79bf2be4ec 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java @@ -370,8 +370,10 @@ protected BasePartitionEvaluator createPartitionEvaluator( WindowFrameDef winFrame, PTFPartition partition, List parameters, - ObjectInspector outputOI) { - return new BasePartitionEvaluator.SumPartitionHiveDecimalEvaluator(this, winFrame, partition, parameters, outputOI); + ObjectInspector outputOI, + boolean nullsLast) { + return new BasePartitionEvaluator.SumPartitionHiveDecimalEvaluator(this, winFrame, + partition, parameters, outputOI, nullsLast); } } @@ -501,8 +503,10 @@ protected BasePartitionEvaluator createPartitionEvaluator( WindowFrameDef winFrame, PTFPartition partition, List parameters, - ObjectInspector outputOI) { - return new BasePartitionEvaluator.SumPartitionDoubleEvaluator(this, winFrame, partition, parameters, outputOI); + ObjectInspector outputOI, + boolean nullsLast) { + return new BasePartitionEvaluator.SumPartitionDoubleEvaluator(this, winFrame, partition, + parameters, outputOI, nullsLast); } } @@ -627,8 +631,10 @@ protected BasePartitionEvaluator createPartitionEvaluator( WindowFrameDef winFrame, PTFPartition partition, List parameters, - ObjectInspector outputOI) { - 
return new BasePartitionEvaluator.SumPartitionLongEvaluator(this, winFrame, partition, parameters, outputOI); + ObjectInspector outputOI, + boolean nullsLast) { + return new BasePartitionEvaluator.SumPartitionLongEvaluator(this, winFrame, partition, + parameters, outputOI, nullsLast); } } } diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/BasePartitionEvaluator.java ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/BasePartitionEvaluator.java index ac839690a9..d44604d2ec 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/BasePartitionEvaluator.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/BasePartitionEvaluator.java @@ -54,6 +54,7 @@ protected final PTFPartition partition; protected final List parameters; protected final ObjectInspector outputOI; + protected final boolean nullsLast; /** * Internal class to represent a window range in a partition by searching the @@ -181,12 +182,14 @@ public BasePartitionEvaluator( WindowFrameDef winFrame, PTFPartition partition, List parameters, - ObjectInspector outputOI) { + ObjectInspector outputOI, + boolean nullsLast) { this.wrappedEvaluator = wrappedEvaluator; this.winFrame = winFrame; this.partition = partition; this.parameters = parameters; this.outputOI = outputOI; + this.nullsLast = nullsLast; } /** @@ -206,7 +209,7 @@ public Object getPartitionAgg() throws HiveException { * @throws HiveException */ public Object iterate(int currentRow, LeadLagInfo leadLagInfo) throws HiveException { - Range range = getRange(winFrame, currentRow, partition); + Range range = getRange(winFrame, currentRow, partition, nullsLast); PTFPartitionIterator pItr = range.iterator(); return calcFunctionValue(pItr, leadLagInfo); } @@ -242,8 +245,8 @@ protected Object calcFunctionValue(PTFPartitionIterator pItr, LeadLagInf return ObjectInspectorUtils.copyToStandardObject(wrappedEvaluator.evaluate(aggBuffer), outputOI); } - protected static Range getRange(WindowFrameDef winFrame, int currRow, PTFPartition p) - throws HiveException { + protected static Range getRange(WindowFrameDef winFrame, int currRow, PTFPartition p, + boolean nullsLast) throws HiveException { BoundaryDef startB = winFrame.getStart(); BoundaryDef endB = winFrame.getEnd(); @@ -252,7 +255,7 @@ protected static Range getRange(WindowFrameDef winFrame, int currRow, PTFPartiti start = getRowBoundaryStart(startB, currRow); end = getRowBoundaryEnd(endB, currRow, p); } else { - ValueBoundaryScanner vbs = ValueBoundaryScanner.getScanner(winFrame); + ValueBoundaryScanner vbs = ValueBoundaryScanner.getScanner(winFrame, nullsLast); start = vbs.computeStart(currRow, p); end = vbs.computeEnd(currRow, p); } @@ -323,8 +326,9 @@ public SumPartitionEvaluator( WindowFrameDef winFrame, PTFPartition partition, List parameters, - ObjectInspector outputOI) { - super(wrappedEvaluator, winFrame, partition, parameters, outputOI); + ObjectInspector outputOI, + boolean nullsLast) { + super(wrappedEvaluator, winFrame, partition, parameters, outputOI, nullsLast); sumAgg = new WindowSumAgg(); } @@ -336,7 +340,7 @@ public Object iterate(int currentRow, LeadLagInfo leadLagInfo) throws HiveExcept return super.iterate(currentRow, leadLagInfo); } - Range currentRange = getRange(winFrame, currentRow, partition); + Range currentRange = getRange(winFrame, currentRow, partition, nullsLast); ResultType result; if (currentRow == 0 || // Reset for the new partition sumAgg.prevRange == null || @@ -365,8 +369,8 @@ public Object iterate(int currentRow, LeadLagInfo leadLagInfo) throws HiveExcept public static class 
SumPartitionDoubleEvaluator extends SumPartitionEvaluator { public SumPartitionDoubleEvaluator(GenericUDAFEvaluator wrappedEvaluator, WindowFrameDef winFrame, PTFPartition partition, - List parameters, ObjectInspector outputOI) { - super(wrappedEvaluator, winFrame, partition, parameters, outputOI); + List parameters, ObjectInspector outputOI, boolean nullsLast) { + super(wrappedEvaluator, winFrame, partition, parameters, outputOI, nullsLast); this.typeOperation = new TypeOperationDoubleWritable(); } } @@ -374,8 +378,8 @@ public SumPartitionDoubleEvaluator(GenericUDAFEvaluator wrappedEvaluator, public static class SumPartitionLongEvaluator extends SumPartitionEvaluator { public SumPartitionLongEvaluator(GenericUDAFEvaluator wrappedEvaluator, WindowFrameDef winFrame, PTFPartition partition, - List parameters, ObjectInspector outputOI) { - super(wrappedEvaluator, winFrame, partition, parameters, outputOI); + List parameters, ObjectInspector outputOI, boolean nullsLast) { + super(wrappedEvaluator, winFrame, partition, parameters, outputOI, nullsLast); this.typeOperation = new TypeOperationLongWritable(); } } @@ -383,8 +387,8 @@ public SumPartitionLongEvaluator(GenericUDAFEvaluator wrappedEvaluator, public static class SumPartitionHiveDecimalEvaluator extends SumPartitionEvaluator { public SumPartitionHiveDecimalEvaluator(GenericUDAFEvaluator wrappedEvaluator, WindowFrameDef winFrame, PTFPartition partition, - List parameters, ObjectInspector outputOI) { - super(wrappedEvaluator, winFrame, partition, parameters, outputOI); + List parameters, ObjectInspector outputOI, boolean nullsLast) { + super(wrappedEvaluator, winFrame, partition, parameters, outputOI, nullsLast); this.typeOperation = new TypeOperationHiveDecimalWritable(); } } @@ -411,8 +415,9 @@ public AvgPartitionEvaluator( WindowFrameDef winFrame, PTFPartition partition, List parameters, - ObjectInspector outputOI) { - super(wrappedEvaluator, winFrame, partition, parameters, outputOI); + ObjectInspector outputOI, + boolean nullsLast) { + super(wrappedEvaluator, winFrame, partition, parameters, outputOI, nullsLast); } /** @@ -453,7 +458,7 @@ public Object iterate(int currentRow, LeadLagInfo leadLagInfo) throws HiveExcept return super.iterate(currentRow, leadLagInfo); } - Range currentRange = getRange(winFrame, currentRow, partition); + Range currentRange = getRange(winFrame, currentRow, partition, nullsLast); if (currentRow == 0 || // Reset for the new partition avgAgg.prevRange == null || currentRange.getSize() <= currentRange.getDiff(avgAgg.prevRange)) { @@ -485,8 +490,9 @@ public Object iterate(int currentRow, LeadLagInfo leadLagInfo) throws HiveExcept public AvgPartitionDoubleEvaluator(GenericUDAFEvaluator wrappedEvaluator, WindowFrameDef winFrame, PTFPartition partition, - List parameters, ObjectInspector inputOI, ObjectInspector outputOI) throws HiveException { - super(wrappedEvaluator, winFrame, partition, parameters, outputOI); + List parameters, ObjectInspector inputOI, ObjectInspector outputOI, + boolean nullsLast) throws HiveException { + super(wrappedEvaluator, winFrame, partition, parameters, outputOI, nullsLast); this.typeOperation = new TypeOperationDoubleWritable(); } } @@ -495,8 +501,9 @@ public AvgPartitionDoubleEvaluator(GenericUDAFEvaluator wrappedEvaluator, public AvgPartitionHiveDecimalEvaluator(GenericUDAFEvaluator wrappedEvaluator, WindowFrameDef winFrame, PTFPartition partition, - List parameters, ObjectInspector inputOI, ObjectInspector outputOI) throws HiveException { - super(wrappedEvaluator, winFrame, 
partition, parameters, outputOI); + List parameters, ObjectInspector inputOI, ObjectInspector outputOI, + boolean nullsLast) throws HiveException { + super(wrappedEvaluator, winFrame, partition, parameters, outputOI, nullsLast); this.typeOperation = new TypeOperationHiveDecimalWritable(); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionEvaluator.java ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionEvaluator.java index 7d5f92c1a1..e2b7035254 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionEvaluator.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionEvaluator.java @@ -93,6 +93,15 @@ boolean transformsRawInput; transient protected PTFPartition outputPartition; transient protected boolean canAcceptInputAsStream; + protected boolean nullsLast; + + public boolean getNullsLast() { + return nullsLast; + } + + public void setNullsLast(boolean nullsLast) { + this.nullsLast = nullsLast; + } public StructObjectInspector getOutputOI() { return OI; diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionResolver.java ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionResolver.java index 5b81a43240..dbc7693420 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionResolver.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionResolver.java @@ -20,6 +20,7 @@ import java.util.List; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; @@ -71,17 +72,19 @@ public void initialize(HiveConf cfg, PTFDesc ptfDesc, PartitionedTableFunctionDe evaluator.setTransformsRawInput(transformsRawInput()); evaluator.setTableDef(tDef); evaluator.setQueryDef(ptfDesc); + evaluator.setNullsLast(HiveConf.getBoolVar(cfg, HiveConf.ConfVars.HIVE_DEFAULT_NULLS_LAST)); } /* * called during deserialization of a QueryDef during runtime. 
*/ - public void initialize(PTFDesc ptfDesc, PartitionedTableFunctionDef tDef, TableFunctionEvaluator evaluator) + public void initialize(Configuration cfg, PTFDesc ptfDesc, PartitionedTableFunctionDef tDef, TableFunctionEvaluator evaluator) throws HiveException { this.evaluator = evaluator; this.ptfDesc = ptfDesc; evaluator.setTableDef(tDef); evaluator.setQueryDef(ptfDesc); + evaluator.setNullsLast(HiveConf.getBoolVar(cfg, HiveConf.ConfVars.HIVE_DEFAULT_NULLS_LAST)); } public TableFunctionEvaluator getEvaluator() { diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java index b34c4d63d4..e633edb96e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java @@ -36,24 +36,28 @@ public abstract class ValueBoundaryScanner { BoundaryDef start, end; + protected final boolean nullsLast; - public ValueBoundaryScanner(BoundaryDef start, BoundaryDef end) { + public ValueBoundaryScanner(BoundaryDef start, BoundaryDef end, boolean nullsLast) { this.start = start; this.end = end; + this.nullsLast = nullsLast; } public abstract int computeStart(int rowIdx, PTFPartition p) throws HiveException; public abstract int computeEnd(int rowIdx, PTFPartition p) throws HiveException; - public static ValueBoundaryScanner getScanner(WindowFrameDef winFrameDef) + public static ValueBoundaryScanner getScanner(WindowFrameDef winFrameDef, boolean nullsLast) throws HiveException { OrderDef orderDef = winFrameDef.getOrderDef(); int numOrders = orderDef.getExpressions().size(); if (numOrders != 1) { - return new MultiValueBoundaryScanner(winFrameDef.getStart(), winFrameDef.getEnd(), orderDef); + return new MultiValueBoundaryScanner(winFrameDef.getStart(), winFrameDef.getEnd(), orderDef, + nullsLast); } else { - return SingleValueBoundaryScanner.getScanner(winFrameDef.getStart(), winFrameDef.getEnd(), orderDef); + return SingleValueBoundaryScanner.getScanner(winFrameDef.getStart(), winFrameDef.getEnd(), + orderDef, nullsLast); } } } @@ -65,8 +69,9 @@ public static ValueBoundaryScanner getScanner(WindowFrameDef winFrameDef) abstract class SingleValueBoundaryScanner extends ValueBoundaryScanner { OrderExpressionDef expressionDef; - public SingleValueBoundaryScanner(BoundaryDef start, BoundaryDef end, OrderExpressionDef expressionDef) { - super(start, end); + public SingleValueBoundaryScanner(BoundaryDef start, BoundaryDef end, + OrderExpressionDef expressionDef, boolean nullsLast) { + super(start, end, nullsLast); this.expressionDef = expressionDef; } @@ -125,11 +130,8 @@ protected int computeStartPreceding(int rowIdx, PTFPartition p) throws HiveExcep Object sortKey = computeValue(p.getAt(rowIdx)); if ( sortKey == null ) { - // Use Case 2. - if ( expressionDef.getOrder() == Order.ASC ) { - return 0; - } - else { // Use Case 3. + // Use Case 3. + if (nullsLast || expressionDef.getOrder() == Order.DESC) { while ( sortKey == null && rowIdx >= 0 ) { --rowIdx; if ( rowIdx >= 0 ) { @@ -138,6 +140,11 @@ protected int computeStartPreceding(int rowIdx, PTFPartition p) throws HiveExcep } return rowIdx+1; } + else { // Use Case 2. + if ( expressionDef.getOrder() == Order.ASC ) { + return 0; + } + } } Object rowVal = sortKey; @@ -200,7 +207,7 @@ protected int computeStartFollowing(int rowIdx, PTFPartition p) throws HiveExcep if ( sortKey == null ) { // Use Case 9. 
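A note on the null "use cases" in this hunk: under NULLS LAST, a partition sorted on the order expression keeps all null sort keys in one contiguous run at the tail (under NULLS FIRST, at the head). A boundary computed for a null current key is therefore either the first index of that tail run or the partition end, which is what the branch below returns. A minimal standalone sketch of the backward walk, using a plain array in place of PTFPartition (illustrative names, not Hive API):

import java.util.Arrays;

// Sketch: find the start of the null run at the tail of a partition that is
// sorted ASC with NULLS LAST, mirroring the --rowIdx walk in the patched
// computeStartPreceding when the current row's sort key is null.
public class NullsLastBoundaryDemo {
  /** Rows [result, length) form the null run. */
  static int firstNullIndex(Integer[] sortedNullsLast) {
    int idx = sortedNullsLast.length;
    while (idx > 0 && sortedNullsLast[idx - 1] == null) {
      idx--;
    }
    return idx;
  }

  public static void main(String[] args) {
    Integer[] partition = {1, 3, 7, null, null}; // ASC, NULLS LAST
    System.out.println(Arrays.toString(partition)
        + " -> null run starts at " + firstNullIndex(partition)); // prints 3
  }
}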
- if ( expressionDef.getOrder() == Order.DESC) { + if (nullsLast || expressionDef.getOrder() == Order.DESC) { return p.size(); } else { // Use Case 10. @@ -289,7 +296,7 @@ protected int computeEndPreceding(int rowIdx, PTFPartition p) throws HiveExcepti if ( sortKey == null ) { // Use Case 2. - if ( expressionDef.getOrder() == Order.DESC ) { + if (nullsLast || expressionDef.getOrder() == Order.DESC ) { return p.size(); } else { // Use Case 3. @@ -362,7 +369,7 @@ protected int computeEndFollowing(int rowIdx, PTFPartition p) throws HiveExcepti if ( sortKey == null ) { // Use Case 9. - if ( expressionDef.getOrder() == Order.DESC) { + if (nullsLast || expressionDef.getOrder() == Order.DESC) { return p.size(); } else { // Use Case 10. @@ -416,8 +423,8 @@ public Object computeValue(Object row) throws HiveException { @SuppressWarnings("incomplete-switch") - public static SingleValueBoundaryScanner getScanner(BoundaryDef start, BoundaryDef end, OrderDef orderDef) - throws HiveException { + public static SingleValueBoundaryScanner getScanner(BoundaryDef start, BoundaryDef end, + OrderDef orderDef, boolean nullsLast) throws HiveException { if (orderDef.getExpressions().size() != 1) { throw new HiveException("Internal error: initializing SingleValueBoundaryScanner with" + " multiple expression for sorting"); @@ -429,20 +436,20 @@ public static SingleValueBoundaryScanner getScanner(BoundaryDef start, BoundaryD case INT: case LONG: case SHORT: - return new LongValueBoundaryScanner(start, end, exprDef); + return new LongValueBoundaryScanner(start, end, exprDef, nullsLast); case TIMESTAMP: - return new TimestampValueBoundaryScanner(start, end, exprDef); + return new TimestampValueBoundaryScanner(start, end, exprDef, nullsLast); case TIMESTAMPLOCALTZ: - return new TimestampLocalTZValueBoundaryScanner(start, end, exprDef); + return new TimestampLocalTZValueBoundaryScanner(start, end, exprDef, nullsLast); case DOUBLE: case FLOAT: - return new DoubleValueBoundaryScanner(start, end, exprDef); + return new DoubleValueBoundaryScanner(start, end, exprDef, nullsLast); case DECIMAL: - return new HiveDecimalValueBoundaryScanner(start, end, exprDef); + return new HiveDecimalValueBoundaryScanner(start, end, exprDef, nullsLast); case DATE: - return new DateValueBoundaryScanner(start, end, exprDef); + return new DateValueBoundaryScanner(start, end, exprDef, nullsLast); case STRING: - return new StringValueBoundaryScanner(start, end, exprDef); + return new StringValueBoundaryScanner(start, end, exprDef, nullsLast); } throw new HiveException( String.format("Internal Error: attempt to setup a Window for datatype %s", @@ -451,8 +458,9 @@ public static SingleValueBoundaryScanner getScanner(BoundaryDef start, BoundaryD } class LongValueBoundaryScanner extends SingleValueBoundaryScanner { - public LongValueBoundaryScanner(BoundaryDef start, BoundaryDef end, OrderExpressionDef expressionDef) { - super(start, end,expressionDef); + public LongValueBoundaryScanner(BoundaryDef start, BoundaryDef end, + OrderExpressionDef expressionDef, boolean nullsLast) { + super(start, end, expressionDef, nullsLast); } @Override @@ -483,8 +491,9 @@ public boolean isEqual(Object v1, Object v2) { } class DoubleValueBoundaryScanner extends SingleValueBoundaryScanner { - public DoubleValueBoundaryScanner(BoundaryDef start, BoundaryDef end, OrderExpressionDef expressionDef) { - super(start, end,expressionDef); + public DoubleValueBoundaryScanner(BoundaryDef start, BoundaryDef end, + OrderExpressionDef expressionDef, boolean nullsLast) { + super(start, 
end, expressionDef, nullsLast); } @Override @@ -515,8 +524,9 @@ public boolean isEqual(Object v1, Object v2) { } class HiveDecimalValueBoundaryScanner extends SingleValueBoundaryScanner { - public HiveDecimalValueBoundaryScanner(BoundaryDef start, BoundaryDef end, OrderExpressionDef expressionDef) { - super(start, end,expressionDef); + public HiveDecimalValueBoundaryScanner(BoundaryDef start, BoundaryDef end, + OrderExpressionDef expressionDef, boolean nullsLast) { + super(start, end, expressionDef, nullsLast); } @Override @@ -547,8 +557,9 @@ public boolean isEqual(Object v1, Object v2) { } class DateValueBoundaryScanner extends SingleValueBoundaryScanner { - public DateValueBoundaryScanner(BoundaryDef start, BoundaryDef end, OrderExpressionDef expressionDef) { - super(start, end,expressionDef); + public DateValueBoundaryScanner(BoundaryDef start, BoundaryDef end, + OrderExpressionDef expressionDef, boolean nullsLast) { + super(start, end, expressionDef, nullsLast); } @Override @@ -574,8 +585,9 @@ public boolean isEqual(Object v1, Object v2) { } class TimestampValueBoundaryScanner extends SingleValueBoundaryScanner { - public TimestampValueBoundaryScanner(BoundaryDef start, BoundaryDef end, OrderExpressionDef expressionDef) { - super(start, end,expressionDef); + public TimestampValueBoundaryScanner(BoundaryDef start, BoundaryDef end, + OrderExpressionDef expressionDef, boolean nullsLast) { + super(start, end, expressionDef, nullsLast); } @Override @@ -604,8 +616,9 @@ public boolean isEqual(Object v1, Object v2) { } class TimestampLocalTZValueBoundaryScanner extends SingleValueBoundaryScanner { - public TimestampLocalTZValueBoundaryScanner(BoundaryDef start, BoundaryDef end, OrderExpressionDef expressionDef) { - super(start, end,expressionDef); + public TimestampLocalTZValueBoundaryScanner(BoundaryDef start, BoundaryDef end, + OrderExpressionDef expressionDef, boolean nullsLast) { + super(start, end, expressionDef, nullsLast); } @Override @@ -634,8 +647,9 @@ public boolean isEqual(Object v1, Object v2) { } class StringValueBoundaryScanner extends SingleValueBoundaryScanner { - public StringValueBoundaryScanner(BoundaryDef start, BoundaryDef end, OrderExpressionDef expressionDef) { - super(start, end,expressionDef); + public StringValueBoundaryScanner(BoundaryDef start, BoundaryDef end, + OrderExpressionDef expressionDef, boolean nullsLast) { + super(start, end, expressionDef, nullsLast); } @Override @@ -662,8 +676,9 @@ public boolean isEqual(Object v1, Object v2) { class MultiValueBoundaryScanner extends ValueBoundaryScanner { OrderDef orderDef; - public MultiValueBoundaryScanner(BoundaryDef start, BoundaryDef end, OrderDef orderDef) { - super(start, end); + public MultiValueBoundaryScanner(BoundaryDef start, BoundaryDef end, OrderDef orderDef, + boolean nullsLast) { + super(start, end, nullsLast); this.orderDef = orderDef; } diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java index 07b1e2e0f9..5f9009c484 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java @@ -143,7 +143,7 @@ public void execute(PTFPartitionIterator pItr, PTFPartition outP) throws private Object evaluateWindowFunction(WindowFunctionDef wFn, int rowToProcess, PTFPartition partition) throws HiveException { BasePartitionEvaluator partitionEval = wFn.getWFnEval() - 
.getPartitionWindowingEvaluator(wFn.getWindowFrame(), partition, wFn.getArgs(), wFn.getOI()); + .getPartitionWindowingEvaluator(wFn.getWindowFrame(), partition, wFn.getArgs(), wFn.getOI(), nullsLast); return partitionEval.iterate(rowToProcess, ptfDesc.getLlInfo()); } @@ -151,7 +151,7 @@ private Object evaluateWindowFunction(WindowFunctionDef wFn, int rowToProcess, P private Object evaluateFunctionOnPartition(WindowFunctionDef wFn, PTFPartition partition) throws HiveException { BasePartitionEvaluator partitionEval = wFn.getWFnEval() - .getPartitionWindowingEvaluator(wFn.getWindowFrame(), partition, wFn.getArgs(), wFn.getOI()); + .getPartitionWindowingEvaluator(wFn.getWindowFrame(), partition, wFn.getArgs(), wFn.getOI(), nullsLast); return partitionEval.getPartitionAgg(); } diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestBytesBytesMultiHashMap.java ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestBytesBytesMultiHashMap.java index 9f785e6257..e5c749fd19 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestBytesBytesMultiHashMap.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestBytesBytesMultiHashMap.java @@ -84,9 +84,9 @@ public void testGetNonExistent() throws Exception { map.put(kv2, -1); key[0] = (byte)(key[0] + 1); BytesBytesMultiHashMap.Result hashMapResult = new BytesBytesMultiHashMap.Result(); - map.getValueResult(key, 0, key.length, hashMapResult); + map.getValueResult(key, 0, key.length, hashMapResult, null); assertTrue(!hashMapResult.hasRows()); - map.getValueResult(key, 0, 0, hashMapResult); + map.getValueResult(key, 0, 0, hashMapResult, null); assertTrue(!hashMapResult.hasRows()); } @@ -104,7 +104,7 @@ public void testPutWithFullMap() throws Exception { assertEquals(CAPACITY, map.getCapacity()); // Get of non-existent key should terminate.. BytesBytesMultiHashMap.Result hashMapResult = new BytesBytesMultiHashMap.Result(); - map.getValueResult(new byte[0], 0, 0, hashMapResult); + map.getValueResult(new byte[0], 0, 0, hashMapResult, null); } @Test @@ -123,7 +123,7 @@ public void testExpand() throws Exception { private void verifyHashMapResult(BytesBytesMultiHashMap map, byte[] key, byte[]... values) { BytesBytesMultiHashMap.Result hashMapResult = new BytesBytesMultiHashMap.Result(); - byte state = map.getValueResult(key, 0, key.length, hashMapResult); + byte state = map.getValueResult(key, 0, key.length, hashMapResult, null); HashSet hs = new HashSet(); int count = 0; if (hashMapResult.hasRows()) { diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/util/collectoroperator/CollectorTestOperator.java ql/src/test/org/apache/hadoop/hive/ql/exec/util/collectoroperator/CollectorTestOperator.java index 6491d79e19..244208b818 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/util/collectoroperator/CollectorTestOperator.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/util/collectoroperator/CollectorTestOperator.java @@ -26,8 +26,22 @@ private static final long serialVersionUID = 1L; + private boolean isClosed; + private boolean isAborted; + public CollectorTestOperator() { super(); + + isClosed = false; + isAborted = false; + } + + public boolean getIsClosed() { + return isClosed; + } + + public boolean getIsAborted() { + return isAborted; } @Override @@ -35,6 +49,14 @@ public void process(Object row, int tag) throws HiveException { // Do nothing. 
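The closeOp(boolean abort) override added just below lets a test assert that the operator tree was closed, and whether it was closed cleanly or aborted. A compact model of the assumed lifecycle contract (a sketch, not Hive's Operator base class):

// Assumed contract: close(abort) runs once at shutdown; abort == true means
// an upstream failure ended the run. Names here are illustrative only.
class MiniOperator {
  private boolean closed;
  private boolean aborted;

  final void close(boolean abort) {
    if (closed) {
      return; // idempotent in this sketch
    }
    closed = true;
    aborted = abort;
    closeOp(abort);
  }

  protected void closeOp(boolean abort) {
    // Subclasses release resources or record state here.
  }

  boolean isClosed() { return closed; }
  boolean isAborted() { return aborted; }
}

A test drives its rows through the pipeline, calls close(false), then asserts isClosed() and not isAborted().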
} + @Override + public void closeOp(boolean abort) { + isClosed = true; + if (abort) { + isAborted = true; + } + } + @Override public String getName() { return CollectorTestOperator.class.getSimpleName(); diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/util/collectoroperator/RowCollectorTestOperator.java ql/src/test/org/apache/hadoop/hive/ql/exec/util/collectoroperator/RowCollectorTestOperator.java index 18933d4fbd..ce90a6da88 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/util/collectoroperator/RowCollectorTestOperator.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/util/collectoroperator/RowCollectorTestOperator.java @@ -18,6 +18,9 @@ package org.apache.hadoop.hive.ql.exec.util.collectoroperator; +import java.util.ArrayList; +import java.util.List; + import org.apache.hadoop.hive.ql.exec.util.rowobjects.RowTestObjects; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; @@ -28,19 +31,30 @@ private static final long serialVersionUID = 1L; private final ObjectInspector[] outputObjectInspectors; + private final int columnSize; public RowCollectorTestOperator(ObjectInspector[] outputObjectInspectors) { super(); this.outputObjectInspectors = outputObjectInspectors; + columnSize = outputObjectInspectors.length; } @Override public void process(Object row, int tag) throws HiveException { rowCount++; - Object[] rowObjectArray = (Object[]) row; - Object[] resultObjectArray = new Object[rowObjectArray.length]; - for (int c = 0; c < rowObjectArray.length; c++) { - resultObjectArray[c] = ((PrimitiveObjectInspector) outputObjectInspectors[c]).copyObject(rowObjectArray[c]); + Object[] resultObjectArray = new Object[columnSize]; + if (row instanceof ArrayList) { + List rowObjectList = (ArrayList) row; + for (int c = 0; c < columnSize; c++) { + resultObjectArray[c] = + ((PrimitiveObjectInspector) outputObjectInspectors[c]).copyObject(rowObjectList.get(c)); + } + } else { + Object[] rowObjectArray = (Object[]) row; + for (int c = 0; c < columnSize; c++) { + resultObjectArray[c] = + ((PrimitiveObjectInspector) outputObjectInspectors[c]).copyObject(rowObjectArray[c]); + } } nextTestRow(new RowTestObjects(resultObjectArray)); } diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/util/collectoroperator/RowVectorCollectorTestOperator.java ql/src/test/org/apache/hadoop/hive/ql/exec/util/collectoroperator/RowVectorCollectorTestOperator.java index 06cd1e9727..a2f9f04412 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/util/collectoroperator/RowVectorCollectorTestOperator.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/util/collectoroperator/RowVectorCollectorTestOperator.java @@ -39,6 +39,16 @@ public RowVectorCollectorTestOperator(TypeInfo[] outputTypeInfos, vectorExtractRow.init(outputTypeInfos); } + public RowVectorCollectorTestOperator( + int[] outputProjectionColumnNums, + TypeInfo[] outputTypeInfos, + ObjectInspector[] outputObjectInspectors) throws HiveException { + super(); + this.outputObjectInspectors = outputObjectInspectors; + vectorExtractRow = new VectorExtractRow(); + vectorExtractRow.init(outputTypeInfos, outputProjectionColumnNums); + } + @Override public void process(Object row, int tag) throws HiveException { VectorizedRowBatch batch = (VectorizedRowBatch) row; diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/util/rowobjects/RowTestObjectsMultiSet.java ql/src/test/org/apache/hadoop/hive/ql/exec/util/rowobjects/RowTestObjectsMultiSet.java index 51a5f8e412..577387ad71 100644 --- 
ql/src/test/org/apache/hadoop/hive/ql/exec/util/rowobjects/RowTestObjectsMultiSet.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/util/rowobjects/RowTestObjectsMultiSet.java @@ -26,55 +26,129 @@ import org.apache.hadoop.hive.ql.exec.util.rowobjects.RowTestObjects; public class RowTestObjectsMultiSet { - private SortedMap sortedMap; - private int rowCount; - private int totalCount; + + public enum RowFlag { + NONE (0), + REGULAR (0x01), + LEFT_OUTER (0x02), + FULL_OUTER (0x04); + + public final long value; + RowFlag(long value) { + this.value = value; + } + } + + private static class Value { + + // Mutable. + public int count; + public long rowFlags; + + public final int initialKeyCount; + public final int initialValueCount; + public final RowFlag initialRowFlag; + + public Value(int count, RowFlag rowFlag, int totalKeyCount, int totalValueCount) { + this.count = count; + this.rowFlags = rowFlag.value; + + initialKeyCount = totalKeyCount; + initialValueCount = totalValueCount; + initialRowFlag = rowFlag; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("count "); + sb.append(count); + return sb.toString(); + } + } + + private SortedMap sortedMap; + private int totalKeyCount; + private int totalValueCount; public RowTestObjectsMultiSet() { - sortedMap = new TreeMap(); - rowCount = 0; - totalCount = 0; + sortedMap = new TreeMap(); + totalKeyCount = 0; + totalValueCount = 0; } - public int getRowCount() { - return rowCount; + public int getTotalKeyCount() { + return totalKeyCount; } - public int getTotalCount() { - return totalCount; + public int getTotalValueCount() { + return totalValueCount; } - public void add(RowTestObjects testRow) { + public void add(RowTestObjects testRow, RowFlag rowFlag) { if (sortedMap.containsKey(testRow)) { - Integer count = sortedMap.get(testRow); - count++; + Value value = sortedMap.get(testRow); + value.count++; + value.rowFlags |= rowFlag.value; + totalValueCount++; } else { - sortedMap.put(testRow, 1); - rowCount++; + sortedMap.put(testRow, new Value(1, rowFlag, ++totalKeyCount, ++totalValueCount)); + } + + } + + public void add(RowTestObjects testRow, int count) { + if (sortedMap.containsKey(testRow)) { + throw new RuntimeException(); + } + sortedMap.put(testRow, new Value(count, RowFlag.NONE, ++totalKeyCount, ++totalValueCount)); + } + + public String displayRowFlags(long rowFlags) { + StringBuilder sb = new StringBuilder(); + sb.append("{"); + for (RowFlag rowFlag : RowFlag.values()) { + if ((rowFlags & rowFlag.value) != 0) { + if (sb.length() > 1) { + sb.append(", "); + } + sb.append(rowFlag.name()); + } } - totalCount++; + sb.append("}"); + return sb.toString(); } - public boolean verify(RowTestObjectsMultiSet other) { + public boolean verify(RowTestObjectsMultiSet other, String left, String right) { final int thisSize = this.sortedMap.size(); final int otherSize = other.sortedMap.size(); if (thisSize != otherSize) { - System.out.println("*VERIFY* count " + thisSize + " doesn't match otherSize " + otherSize); + System.out.println("*BENCHMARK* " + left + " count " + thisSize + " doesn't match " + right + " " + otherSize); return false; } - Iterator> thisIterator = this.sortedMap.entrySet().iterator(); - Iterator> otherIterator = other.sortedMap.entrySet().iterator(); + Iterator> thisIterator = this.sortedMap.entrySet().iterator(); + Iterator> otherIterator = other.sortedMap.entrySet().iterator(); for (int i = 0; i < thisSize; i++) { - Entry thisEntry = thisIterator.next(); - Entry otherEntry = 
otherIterator.next(); + Entry thisEntry = thisIterator.next(); + Entry otherEntry = otherIterator.next(); if (!thisEntry.getKey().equals(otherEntry.getKey())) { - System.out.println("*VERIFY* thisEntry.getKey() " + thisEntry.getKey() + " doesn't match otherEntry.getKey() " + otherEntry.getKey()); + System.out.println("*BENCHMARK* " + left + " row " + thisEntry.getKey().toString() + + " (rowFlags " + displayRowFlags(thisEntry.getValue().rowFlags) + + " count " + thisEntry.getValue().count + ")" + + " but found " + right + " row " + otherEntry.getKey().toString() + + " (initialKeyCount " + + otherEntry.getValue().initialKeyCount + + " initialValueCount " + otherEntry.getValue().initialValueCount + ")"); return false; } // Check multi-set count. - if (!thisEntry.getValue().equals(otherEntry.getValue())) { - System.out.println("*VERIFY* key " + thisEntry.getKey() + " count " + thisEntry.getValue() + " doesn't match " + otherEntry.getValue()); + if (thisEntry.getValue().count != otherEntry.getValue().count) { + System.out.println("*BENCHMARK* " + left + " row " + thisEntry.getKey().toString() + + " count " + thisEntry.getValue().count + + " (rowFlags " + displayRowFlags(thisEntry.getValue().rowFlags) + ")" + + " doesn't match " + right + " row count " + otherEntry.getValue().count + + " (initialKeyCount " + + otherEntry.getValue().initialKeyCount + + " initialValueCount " + otherEntry.getValue().initialValueCount + ")"); return false; } } @@ -84,6 +158,51 @@ public boolean verify(RowTestObjectsMultiSet other) { return true; } + public RowTestObjectsMultiSet subtract(RowTestObjectsMultiSet other) { + RowTestObjectsMultiSet result = new RowTestObjectsMultiSet(); + + Iterator> thisIterator = this.sortedMap.entrySet().iterator(); + while (thisIterator.hasNext()) { + Entry thisEntry = thisIterator.next(); + + if (other.sortedMap.containsKey(thisEntry.getKey())) { + Value thisValue = thisEntry.getValue(); + Value otherValue = other.sortedMap.get(thisEntry.getKey()); + if (thisValue.count == otherValue.count) { + continue; + } + } + result.add(thisEntry.getKey(), thisEntry.getValue().count); + } + + return result; + } + + public void displayDifferences(RowTestObjectsMultiSet other, String left, String right) { + + RowTestObjectsMultiSet leftOnly = this.subtract(other); + Iterator> leftOnlyIterator = + leftOnly.sortedMap.entrySet().iterator(); + while (leftOnlyIterator.hasNext()) { + Entry leftOnlyEntry = leftOnlyIterator.next(); + System.out.println( + "*BENCHMARK* " + left + " only row " + leftOnlyEntry.getKey().toString() + + " count " + leftOnlyEntry.getValue().count + + " (initialRowFlag " + leftOnlyEntry.getValue().initialRowFlag.name() + ")"); + } + + RowTestObjectsMultiSet rightOnly = other.subtract(this); + Iterator> rightOnlyIterator = + rightOnly.sortedMap.entrySet().iterator(); + while (rightOnlyIterator.hasNext()) { + Entry rightOnlyEntry = rightOnlyIterator.next(); + System.out.println( + "*BENCHMARK* " + right + " only row " + rightOnlyEntry.getKey().toString() + + " count " + rightOnlyEntry.getValue().count + + " (initialRowFlag " + rightOnlyEntry.getValue().initialRowFlag.name() + ")"); + } + } + @Override public String toString() { return sortedMap.toString(); diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java index 6fd8e0957f..4ebf3ee709 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java +++ 
ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java @@ -35,8 +35,6 @@ private static int TEST_COUNT = 5000; - private static int fake = 0; - @Test public void testDouble() throws Exception { diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java index 2d0c783187..6ce63a4f79 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java @@ -62,9 +62,8 @@ public ValidatorVectorSelectOperator(CompilationOpContext ctx, OperatorDesc conf * Override forward to do validation */ @Override - public void forward(Object row, ObjectInspector rowInspector, boolean isVectorized) + public void vectorForward(VectorizedRowBatch vrg) throws HiveException { - VectorizedRowBatch vrg = (VectorizedRowBatch) row; int[] projections = vrg.projectedColumns; assertEquals(2, vrg.projectionSize); diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java index b84273ade5..a2febe4919 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java @@ -170,15 +170,18 @@ public boolean getAddPadding() { private final GenerationKind generationKind; private final TypeInfo typeInfo; + private final boolean columnAllowNulls; private final TypeInfo sourceTypeInfo; private final StringGenerationOption stringGenerationOption; private final List valueList; - private GenerationSpec(GenerationKind generationKind, TypeInfo typeInfo, + private GenerationSpec( + GenerationKind generationKind, TypeInfo typeInfo, boolean columnAllowNulls, TypeInfo sourceTypeInfo, StringGenerationOption stringGenerationOption, List valueList) { this.generationKind = generationKind; this.typeInfo = typeInfo; + this.columnAllowNulls = columnAllowNulls; this.sourceTypeInfo = sourceTypeInfo; this.stringGenerationOption = stringGenerationOption; this.valueList = valueList; @@ -192,6 +195,10 @@ public TypeInfo getTypeInfo() { return typeInfo; } + public boolean getColumnAllowNulls() { + return columnAllowNulls; + } + public TypeInfo getSourceTypeInfo() { return sourceTypeInfo; } @@ -206,34 +213,46 @@ public StringGenerationOption getStringGenerationOption() { public static GenerationSpec createSameType(TypeInfo typeInfo) { return new GenerationSpec( - GenerationKind.SAME_TYPE, typeInfo, null, null, null); + GenerationKind.SAME_TYPE, typeInfo, true, + null, null, null); + } + + public static GenerationSpec createSameType(TypeInfo typeInfo, boolean allowNulls) { + return new GenerationSpec( + GenerationKind.SAME_TYPE, typeInfo, allowNulls, + null, null, null); } public static GenerationSpec createOmitGeneration(TypeInfo typeInfo) { return new GenerationSpec( - GenerationKind.OMIT_GENERATION, typeInfo, null, null, null); + GenerationKind.OMIT_GENERATION, typeInfo, true, + null, null, null); } public static GenerationSpec createStringFamily(TypeInfo typeInfo, StringGenerationOption stringGenerationOption) { return new GenerationSpec( - GenerationKind.STRING_FAMILY, typeInfo, null, stringGenerationOption, null); + GenerationKind.STRING_FAMILY, typeInfo, true, + null, stringGenerationOption, null); } public static GenerationSpec createStringFamilyOtherTypeValue(TypeInfo typeInfo, TypeInfo 
otherTypeTypeInfo) { return new GenerationSpec( - GenerationKind.STRING_FAMILY_OTHER_TYPE_VALUE, typeInfo, otherTypeTypeInfo, null, null); + GenerationKind.STRING_FAMILY_OTHER_TYPE_VALUE, typeInfo, true, + otherTypeTypeInfo, null, null); } public static GenerationSpec createTimestampMilliseconds(TypeInfo typeInfo) { return new GenerationSpec( - GenerationKind.TIMESTAMP_MILLISECONDS, typeInfo, null, null, null); + GenerationKind.TIMESTAMP_MILLISECONDS, typeInfo, true, + null, null, null); } public static GenerationSpec createValueList(TypeInfo typeInfo, List valueList) { return new GenerationSpec( - GenerationKind.VALUE_LIST, typeInfo, null, null, valueList); + GenerationKind.VALUE_LIST, typeInfo, true, + null, null, valueList); } } @@ -620,7 +639,9 @@ private void chooseSchema(SupportedTypes supportedTypes, Set allowedType if (generationSpecList != null) { typeName = generationSpecList.get(c).getTypeInfo().getTypeName(); - dataTypePhysicalVariation = explicitDataTypePhysicalVariationList.get(c); + dataTypePhysicalVariation = + explicitDataTypePhysicalVariationList != null ? + explicitDataTypePhysicalVariationList.get(c) : DataTypePhysicalVariation.NONE; } else if (onlyOne || allowedTypeNameSet != null) { typeName = getRandomTypeName(r, supportedTypes, allowedTypeNameSet); } else { @@ -887,7 +908,7 @@ public static Object randomStringFamily(Random random, TypeInfo typeInfo, Object object; switch (generationKind) { case SAME_TYPE: - object = randomWritable(c); + object = randomWritable(c, generationSpec.getColumnAllowNulls()); break; case OMIT_GENERATION: object = null; @@ -1454,6 +1475,12 @@ public static Object getNonWritableObject(Object object, TypeInfo typeInfo, } } + public Object randomWritable(int column, boolean columnAllowNull) { + return randomWritable( + r, typeInfos[column], objectInspectorList.get(column), dataTypePhysicalVariations[column], + columnAllowNull && allowNull); + } + public Object randomWritable(int column) { return randomWritable( r, typeInfos[column], objectInspectorList.get(column), dataTypePhysicalVariations[column], diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/MapJoinTestConfig.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/MapJoinTestConfig.java index 0514e3ff02..d127342061 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/MapJoinTestConfig.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/MapJoinTestConfig.java @@ -26,10 +26,13 @@ import java.util.Map; import java.util.Map.Entry; +import org.apache.commons.lang.ArrayUtils; +import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; import org.apache.hadoop.hive.ql.exec.Operator; +import org.apache.hadoop.hive.ql.exec.OperatorFactory; import org.apache.hadoop.hive.ql.exec.persistence.HashMapWrapper; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinBytesTableContainer; import org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectSerDeContext; @@ -37,24 +40,31 @@ import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainerSerDe; import org.apache.hadoop.hive.ql.exec.util.collectoroperator.CollectorTestOperator; import org.apache.hadoop.hive.ql.exec.util.collectoroperator.CountCollectorTestOperator; +import org.apache.hadoop.hive.ql.exec.util.collectoroperator.RowCollectorTestOperator; import 
org.apache.hadoop.hive.ql.exec.util.collectoroperator.RowCollectorTestOperatorBase; +import org.apache.hadoop.hive.ql.exec.util.collectoroperator.RowVectorCollectorTestOperator; import org.apache.hadoop.hive.ql.exec.util.rowobjects.RowTestObjects; +import org.apache.hadoop.hive.ql.exec.util.rowobjects.RowTestObjectsMultiSet; import org.apache.hadoop.hive.ql.exec.vector.VectorColumnOutputMapping; import org.apache.hadoop.hive.ql.exec.vector.VectorColumnSourceMapping; +import org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinBaseOperator; import org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator; import org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOuterFilteredOperator; import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext; +import org.apache.hadoop.hive.ql.exec.vector.VectorizationContextRegion; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.VectorMapJoinFastTableContainer; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.VerifyFastRow; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; +import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.JoinCondDesc; import org.apache.hadoop.hive.ql.plan.JoinDesc; import org.apache.hadoop.hive.ql.plan.MapJoinDesc; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.PlanUtils; +import org.apache.hadoop.hive.ql.plan.SelectDesc; import org.apache.hadoop.hive.ql.plan.TableDesc; import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc; import org.apache.hadoop.hive.ql.plan.VectorMapJoinInfo; @@ -62,6 +72,7 @@ import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableKeyType; import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableKind; import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.VectorMapJoinVariation; +import org.apache.hadoop.hive.ql.plan.VectorSelectDesc; import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeUtils; @@ -90,45 +101,204 @@ NATIVE_VECTOR_FAST } + public static boolean isVectorOutput(MapJoinTestImplementation mapJoinImplementation) { + return + (mapJoinImplementation != MapJoinTestImplementation.ROW_MODE_HASH_MAP && + mapJoinImplementation != MapJoinTestImplementation.ROW_MODE_OPTIMIZED); + } + + /* + * This test collector operator is for MapJoin row-mode. 
+ */ + public static class TestMultiSetCollectorOperator extends RowCollectorTestOperator { + + private final RowTestObjectsMultiSet testRowMultiSet; + + public TestMultiSetCollectorOperator( + ObjectInspector[] outputObjectInspectors, + RowTestObjectsMultiSet testRowMultiSet) { + super(outputObjectInspectors); + this.testRowMultiSet = testRowMultiSet; + } + + public RowTestObjectsMultiSet getTestRowMultiSet() { + return testRowMultiSet; + } + + public void nextTestRow(RowTestObjects testRow) { + testRowMultiSet.add(testRow, RowTestObjectsMultiSet.RowFlag.NONE); + } + + @Override + public String getName() { + return TestMultiSetCollectorOperator.class.getSimpleName(); + } + } + + public static class TestMultiSetVectorCollectorOperator extends RowVectorCollectorTestOperator { + + private final RowTestObjectsMultiSet testRowMultiSet; + + public RowTestObjectsMultiSet getTestRowMultiSet() { + return testRowMultiSet; + } + + public TestMultiSetVectorCollectorOperator(TypeInfo[] outputTypeInfos, + ObjectInspector[] outputObjectInspectors, RowTestObjectsMultiSet testRowMultiSet) + throws HiveException { + super(outputTypeInfos, outputObjectInspectors); + this.testRowMultiSet = testRowMultiSet; + } + + public TestMultiSetVectorCollectorOperator( + int[] outputProjectionColumnNums, + TypeInfo[] outputTypeInfos, + ObjectInspector[] outputObjectInspectors, + RowTestObjectsMultiSet testRowMultiSet) throws HiveException { + super(outputProjectionColumnNums, outputTypeInfos, outputObjectInspectors); + this.testRowMultiSet = testRowMultiSet; + } + + public void nextTestRow(RowTestObjects testRow) { + testRowMultiSet.add(testRow, RowTestObjectsMultiSet.RowFlag.NONE); + } + + @Override + public String getName() { + return TestMultiSetVectorCollectorOperator.class.getSimpleName(); + } + } + public static MapJoinDesc createMapJoinDesc(MapJoinTestDescription testDesc) { + return createMapJoinDesc(testDesc, false); + } + + public static MapJoinDesc createMapJoinDesc(MapJoinTestDescription testDesc, + boolean isFullOuterIntersect) { MapJoinDesc mapJoinDesc = new MapJoinDesc(); + mapJoinDesc.setPosBigTable(0); - List keyExpr = new ArrayList(); + + List bigTableKeyExpr = new ArrayList(); for (int i = 0; i < testDesc.bigTableKeyColumnNums.length; i++) { - keyExpr.add(new ExprNodeColumnDesc(testDesc.bigTableKeyTypeInfos[i], testDesc.bigTableKeyColumnNames[i], "B", false)); + bigTableKeyExpr.add( + new ExprNodeColumnDesc( + testDesc.bigTableKeyTypeInfos[i], + testDesc.bigTableKeyColumnNames[i], "B", false)); } Map> keyMap = new HashMap>(); - keyMap.put((byte)0, keyExpr); + keyMap.put((byte) 0, bigTableKeyExpr); + + // Big Table expression includes all columns -- keys and extra (value) columns. + // UNDONE: Assumes all values retained... + List bigTableExpr = new ArrayList(); + for (int i = 0; i < testDesc.bigTableColumnNames.length; i++) { + bigTableExpr.add( + new ExprNodeColumnDesc( + testDesc.bigTableTypeInfos[i], + testDesc.bigTableColumnNames[i], "B", false)); + } + Map> exprMap = new HashMap>(); + exprMap.put((byte) 0, bigTableExpr); + + List smallTableKeyExpr = new ArrayList(); + + for (int i = 0; i < testDesc.smallTableKeyTypeInfos.length; i++) { + ExprNodeColumnDesc exprNodeColumnDesc = + new ExprNodeColumnDesc( + testDesc.smallTableKeyTypeInfos[i], + testDesc.smallTableKeyColumnNames[i], "S", false); + smallTableKeyExpr.add(exprNodeColumnDesc); + } + + // Retained Small Table keys and values. 
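For orientation, MapJoinDesc wants its key and value expressions keyed by table tag: byte 0 is the big (probe) table and byte 1 the small (build) table, matching setPosBigTable(0) above. A toy illustration of that shape, with strings standing in for ExprNodeDesc (the retained small-table expressions continue below):

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Toy model of the tag-keyed maps (assumption: tag 0 = big, tag 1 = small).
public class TagKeyedMapDemo {
  public static void main(String[] args) {
    Map<Byte, List<String>> keyMap = new HashMap<>();
    keyMap.put((byte) 0, Arrays.asList("B.key0")); // big table join key
    keyMap.put((byte) 1, Arrays.asList("S.key0")); // small table join key
    System.out.println(keyMap);
  }
}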
List smallTableExpr = new ArrayList(); - for (int i = 0; i < testDesc.smallTableValueColumnNames.length; i++) { - smallTableExpr.add(new ExprNodeColumnDesc(testDesc.smallTableValueTypeInfos[i], testDesc.smallTableValueColumnNames[i], "S", false)); + final int smallTableRetainKeySize = testDesc.smallTableRetainKeyColumnNums.length; + for (int i = 0; i < smallTableRetainKeySize; i++) { + int smallTableKeyColumnNum = testDesc.smallTableRetainKeyColumnNums[i]; + smallTableExpr.add( + new ExprNodeColumnDesc( + testDesc.smallTableTypeInfos[smallTableKeyColumnNum], + testDesc.smallTableColumnNames[smallTableKeyColumnNum], "S", false)); } - keyMap.put((byte)1, smallTableExpr); + + final int smallTableRetainValueSize = testDesc.smallTableRetainValueColumnNums.length; + for (int i = 0; i < smallTableRetainValueSize; i++) { + int smallTableValueColumnNum = + smallTableRetainKeySize + testDesc.smallTableRetainValueColumnNums[i]; + smallTableExpr.add( + new ExprNodeColumnDesc( + testDesc.smallTableTypeInfos[smallTableValueColumnNum], + testDesc.smallTableColumnNames[smallTableValueColumnNum], "S", false)); + } + + keyMap.put((byte) 1, smallTableKeyExpr); + exprMap.put((byte) 1, smallTableExpr); mapJoinDesc.setKeys(keyMap); - mapJoinDesc.setExprs(keyMap); + mapJoinDesc.setExprs(exprMap); Byte[] order = new Byte[] {(byte) 0, (byte) 1}; mapJoinDesc.setTagOrder(order); - mapJoinDesc.setNoOuterJoin(testDesc.vectorMapJoinVariation != VectorMapJoinVariation.OUTER); + mapJoinDesc.setNoOuterJoin( + testDesc.vectorMapJoinVariation != VectorMapJoinVariation.OUTER && + testDesc.vectorMapJoinVariation != VectorMapJoinVariation.FULL_OUTER); Map> filterMap = new HashMap>(); filterMap.put((byte) 0, new ArrayList()); // None. mapJoinDesc.setFilters(filterMap); List bigTableRetainColumnNumsList = intArrayToList(testDesc.bigTableRetainColumnNums); - - // For now, just small table values... - List smallTableRetainColumnNumsList = intArrayToList(testDesc.smallTableRetainValueColumnNums); - Map> retainListMap = new HashMap>(); retainListMap.put((byte) 0, bigTableRetainColumnNumsList); - retainListMap.put((byte) 1, smallTableRetainColumnNumsList); + + // For now, just small table keys/values... + if (testDesc.smallTableRetainKeyColumnNums.length == 0) { + + // Just the value columns numbers with retain. + List smallTableValueRetainColumnNumsList = + intArrayToList(testDesc.smallTableRetainValueColumnNums); + + retainListMap.put((byte) 1, smallTableValueRetainColumnNumsList); + } else { + + // Both the key/value columns numbers. + + // Zero and above numbers indicate a big table key is needed for + // small table result "area". + + // Negative numbers indicate a column to be (deserialize) read from the small table's + // LazyBinary value row. 
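Per the comment above, the valueIndices array built next multiplexes two sources into one int array: a non-negative entry i means "copy big-table key column i", while small-table value column v is stored as -v - 1 so it stays distinguishable. A round-trip check of that encoding:

// Sketch of the valueIndices encoding (assumption: v >= 0 for real columns).
public class ValueIndexEncodingDemo {
  static int encodeSmallTableValueColumn(int v) {
    return -v - 1; // always negative, so it cannot collide with key columns
  }

  static int decodeSmallTableValueColumn(int encoded) {
    return -encoded - 1;
  }

  public static void main(String[] args) {
    for (int v = 0; v < 4; v++) {
      int encoded = encodeSmallTableValueColumn(v);
      System.out.println(v + " -> " + encoded
          + " -> " + decodeSmallTableValueColumn(encoded));
    }
  }
}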
+ + ArrayList smallTableValueIndicesNumsList = new ArrayList(); + for (int i = 0; i < testDesc.smallTableRetainKeyColumnNums.length; i++) { + smallTableValueIndicesNumsList.add(testDesc.smallTableRetainKeyColumnNums[i]); + } + for (int i = 0; i < testDesc.smallTableRetainValueColumnNums.length; i++) { + smallTableValueIndicesNumsList.add(-testDesc.smallTableRetainValueColumnNums[i] - 1); + } + int[] smallTableValueIndicesNums = + ArrayUtils.toPrimitive(smallTableValueIndicesNumsList.toArray(new Integer[0])); + + Map valueIndicesMap = new HashMap(); + valueIndicesMap.put((byte) 1, smallTableValueIndicesNums); + mapJoinDesc.setValueIndices(valueIndicesMap); + } mapJoinDesc.setRetainList(retainListMap); + switch (testDesc.mapJoinPlanVariation) { + case DYNAMIC_PARTITION_HASH_JOIN: + // FULL OUTER behaves differently for dynamic partition hash join. + mapJoinDesc.setDynamicPartitionHashJoin(true); + break; + default: + throw new RuntimeException( + "Unexpected map join plan variation " + testDesc.mapJoinPlanVariation); + } + int joinDescType; switch (testDesc.vectorMapJoinVariation) { case INNER: @@ -141,6 +311,9 @@ public static MapJoinDesc createMapJoinDesc(MapJoinTestDescription testDesc) { case OUTER: joinDescType = JoinDesc.LEFT_OUTER_JOIN; break; + case FULL_OUTER: + joinDescType = JoinDesc.FULL_OUTER_JOIN; + break; default: throw new RuntimeException("unknown operator variation " + testDesc.vectorMapJoinVariation); } @@ -149,12 +322,25 @@ mapJoinDesc.setConds(conds); TableDesc keyTableDesc = PlanUtils.getMapJoinKeyTableDesc(testDesc.hiveConf, PlanUtils - .getFieldSchemasFromColumnList(keyExpr, "")); + .getFieldSchemasFromColumnList(smallTableKeyExpr, "")); mapJoinDesc.setKeyTblDesc(keyTableDesc); + // Small Table value expression columns. + List smallTableValueExpr = new ArrayList(); + + // All Small Table keys and values. + for (int i = 0; i < testDesc.smallTableValueColumnNames.length; i++) { + smallTableValueExpr.add( + new ExprNodeColumnDesc( + testDesc.smallTableValueTypeInfos[i], + testDesc.smallTableValueColumnNames[i], "S", false)); + } + TableDesc valueTableDesc = PlanUtils.getMapJoinValueTableDesc( - PlanUtils.getFieldSchemasFromColumnList(smallTableExpr, "")); + PlanUtils.getFieldSchemasFromColumnList(smallTableValueExpr, "")); ArrayList valueTableDescsList = new ArrayList(); + + // Big Table entry, then Small Table entry. 
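Several switches below add a FULL_OUTER arm, so it is worth stating the row-level contract the tests verify: matched keys emit combined rows, unmatched big-table rows pad the small-table side with nulls, and unmatched small-table keys pad the big-table side. A tiny in-memory reference join (semantics only; the real operators are vectorized and hash-table based):

import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

// Reference FULL OUTER JOIN over single-column tables, printing "big,small".
public class FullOuterDemo {
  public static void main(String[] args) {
    List<Integer> big = Arrays.asList(1, 2, 2, 3);
    Set<Integer> small = new LinkedHashSet<>(Arrays.asList(2, 3, 4));
    Set<Integer> matched = new LinkedHashSet<>();

    for (Integer b : big) {
      if (small.contains(b)) {
        matched.add(b);
        System.out.println(b + "," + b);   // match
      } else {
        System.out.println(b + ",null");   // big-table only
      }
    }
    for (Integer s : small) {
      if (!matched.contains(s)) {
        System.out.println("null," + s);   // small-table only
      }
    }
  }
}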
valueTableDescsList.add(null); valueTableDescsList.add(valueTableDesc); mapJoinDesc.setValueTblDescs(valueTableDescsList); @@ -180,6 +366,7 @@ public static VectorMapJoinDesc createVectorMapJoinDesc(MapJoinTestDescription t hashTableKind = HashTableKind.HASH_SET; break; case OUTER: + case FULL_OUTER: hashTableKind = HashTableKind.HASH_MAP; break; default: @@ -190,9 +377,17 @@ public static VectorMapJoinDesc createVectorMapJoinDesc(MapJoinTestDescription t if (testDesc.bigTableKeyTypeInfos.length == 1) { switch (((PrimitiveTypeInfo) testDesc.bigTableKeyTypeInfos[0]).getPrimitiveCategory()) { case BOOLEAN: + hashTableKeyType = HashTableKeyType.BOOLEAN; + break; case BYTE: + hashTableKeyType = HashTableKeyType.BYTE; + break; case SHORT: + hashTableKeyType = HashTableKeyType.SHORT; + break; case INT: + hashTableKeyType = HashTableKeyType.INT; + break; case LONG: hashTableKeyType = HashTableKeyType.LONG; break; @@ -216,49 +411,112 @@ public static VectorMapJoinDesc createVectorMapJoinDesc(MapJoinTestDescription t vectorDesc.setAllBigTableKeyExpressions(null); - vectorMapJoinInfo.setBigTableValueColumnMap(new int[0]); - vectorMapJoinInfo.setBigTableValueColumnNames(new String[0]); - vectorMapJoinInfo.setBigTableValueTypeInfos(new TypeInfo[0]); + vectorMapJoinInfo.setBigTableValueColumnMap(testDesc.bigTableColumnNums); + vectorMapJoinInfo.setBigTableValueColumnNames(testDesc.bigTableColumnNames); + vectorMapJoinInfo.setBigTableValueTypeInfos(testDesc.bigTableTypeInfos); vectorMapJoinInfo.setSlimmedBigTableValueExpressions(null); vectorDesc.setAllBigTableValueExpressions(null); + vectorMapJoinInfo.setBigTableFilterExpressions(new VectorExpression[0]); + + + /* + * Column mapping. + */ + VectorColumnOutputMapping bigTableRetainMapping = + new VectorColumnOutputMapping("Big Table Retain Mapping"); + + VectorColumnOutputMapping nonOuterSmallTableKeyMapping = + new VectorColumnOutputMapping("Non Outer Small Table Key Mapping"); + + VectorColumnOutputMapping outerSmallTableKeyMapping = + new VectorColumnOutputMapping("Outer Small Table Key Mapping"); + + VectorColumnSourceMapping fullOuterSmallTableKeyMapping = + new VectorColumnSourceMapping("Full Outer Small Table Key Mapping"); + VectorColumnSourceMapping projectionMapping = new VectorColumnSourceMapping("Projection Mapping"); + int nextOutputColumn = 0; + + final int bigTableRetainedSize = testDesc.bigTableRetainColumnNums.length; + for (int i = 0; i < bigTableRetainedSize; i++) { + final int batchColumnIndex = testDesc.bigTableRetainColumnNums[i]; + TypeInfo typeInfo = testDesc.bigTableTypeInfos[i]; + projectionMapping.add( + nextOutputColumn, batchColumnIndex, typeInfo); + // Collect columns we copy from the big table batch to the overflow batch. + if (!bigTableRetainMapping.containsOutputColumn(batchColumnIndex)) { - VectorColumnOutputMapping bigTableRetainedMapping = - new VectorColumnOutputMapping("Big Table Retained Mapping"); - for (int i = 0; i < testDesc.bigTableTypeInfos.length; i++) { - bigTableRetainedMapping.add(i, i, testDesc.bigTableTypeInfos[i]); - projectionMapping.add(i, i, testDesc.bigTableKeyTypeInfos[i]); + // Tolerate repeated use of a big table column. 
+ bigTableRetainMapping.add(batchColumnIndex, batchColumnIndex, typeInfo); + } + nextOutputColumn++; } - VectorColumnOutputMapping bigTableOuterKeyMapping = - new VectorColumnOutputMapping("Big Table Outer Key Mapping"); + boolean isOuterJoin = + (testDesc.vectorMapJoinVariation == VectorMapJoinVariation.OUTER || + testDesc.vectorMapJoinVariation == VectorMapJoinVariation.FULL_OUTER); + + int emulateScratchColumn = testDesc.bigTableTypeInfos.length; + + VectorColumnOutputMapping smallTableKeyOutputMapping = + new VectorColumnOutputMapping("Small Table Key Output Mapping"); + final int smallTableKeyRetainSize = testDesc.smallTableRetainKeyColumnNums.length; + for (int i = 0; i < testDesc.smallTableRetainKeyColumnNums.length; i++) { + final int smallTableKeyColumnNum = testDesc.smallTableRetainKeyColumnNums[i]; + final int bigTableKeyColumnNum = testDesc.bigTableKeyColumnNums[smallTableKeyColumnNum]; + TypeInfo keyTypeInfo = testDesc.smallTableKeyTypeInfos[smallTableKeyColumnNum]; + if (!isOuterJoin) { + // Project the big table key into the small table result "area". + projectionMapping.add(nextOutputColumn, bigTableKeyColumnNum, keyTypeInfo); + if (!bigTableRetainMapping.containsOutputColumn(bigTableKeyColumnNum)) { + nonOuterSmallTableKeyMapping.add(bigTableKeyColumnNum, bigTableKeyColumnNum, keyTypeInfo); + } + } else { + outerSmallTableKeyMapping.add(bigTableKeyColumnNum, emulateScratchColumn, keyTypeInfo); + projectionMapping.add(nextOutputColumn, emulateScratchColumn, keyTypeInfo); + + // For FULL OUTER MapJoin, we need to be able to deserialize a Small Table key + // into the output result. + fullOuterSmallTableKeyMapping.add(smallTableKeyColumnNum, emulateScratchColumn, keyTypeInfo); + emulateScratchColumn++; + } + nextOutputColumn++; + } // The order of the fields in the LazyBinary small table value must be used, so // we use the source ordering flavor for the mapping. - VectorColumnSourceMapping smallTableMapping = - new VectorColumnSourceMapping("Small Table Mapping"); - int outputColumn = testDesc.bigTableTypeInfos.length; + VectorColumnSourceMapping smallTableValueMapping = + new VectorColumnSourceMapping("Small Table Value Mapping"); for (int i = 0; i < testDesc.smallTableValueTypeInfos.length; i++) { - smallTableMapping.add(i, outputColumn, testDesc.smallTableValueTypeInfos[i]); - projectionMapping.add(outputColumn, outputColumn, testDesc.smallTableValueTypeInfos[i]); - outputColumn++; + smallTableValueMapping.add(i, emulateScratchColumn, testDesc.smallTableValueTypeInfos[i]); + projectionMapping.add(nextOutputColumn, emulateScratchColumn, testDesc.smallTableValueTypeInfos[i]); + emulateScratchColumn++; + nextOutputColumn++; } // Convert dynamic arrays and maps to simple arrays. 
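The finalize() calls below flatten each mapping into parallel arrays of column numbers plus type infos, and applying a finalized mapping is then an index-for-index copy. A toy model of that representation (an assumption for illustration, not the VectorColumnMapping API):

import java.util.Arrays;

// Assumed representation: parallel arrays of source and output column numbers.
public class ColumnMappingDemo {
  public static void main(String[] args) {
    int[] sourceColumns = {0, 2, 5}; // columns in the big-table batch
    int[] outputColumns = {0, 1, 2}; // columns in the joined output row

    Object[] batchRow = {"k", "x", "v0", "x", "x", "v1"};
    Object[] outputRow = new Object[outputColumns.length];
    for (int i = 0; i < sourceColumns.length; i++) {
      outputRow[outputColumns[i]] = batchRow[sourceColumns[i]];
    }
    System.out.println(Arrays.toString(outputRow)); // [k, v0, v1]
  }
}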
- bigTableRetainedMapping.finalize(); + bigTableRetainMapping.finalize(); + vectorMapJoinInfo.setBigTableRetainColumnMap(bigTableRetainMapping.getOutputColumns()); + vectorMapJoinInfo.setBigTableRetainTypeInfos(bigTableRetainMapping.getTypeInfos()); - bigTableOuterKeyMapping.finalize(); + nonOuterSmallTableKeyMapping.finalize(); + vectorMapJoinInfo.setNonOuterSmallTableKeyColumnMap(nonOuterSmallTableKeyMapping.getOutputColumns()); + vectorMapJoinInfo.setNonOuterSmallTableKeyTypeInfos(nonOuterSmallTableKeyMapping.getTypeInfos()); - smallTableMapping.finalize(); + outerSmallTableKeyMapping.finalize(); + fullOuterSmallTableKeyMapping.finalize(); - vectorMapJoinInfo.setBigTableRetainedMapping(bigTableRetainedMapping); - vectorMapJoinInfo.setBigTableOuterKeyMapping(bigTableOuterKeyMapping); - vectorMapJoinInfo.setSmallTableMapping(smallTableMapping); + vectorMapJoinInfo.setOuterSmallTableKeyMapping(outerSmallTableKeyMapping); + vectorMapJoinInfo.setFullOuterSmallTableKeyMapping(fullOuterSmallTableKeyMapping); + + smallTableValueMapping.finalize(); + + vectorMapJoinInfo.setSmallTableValueMapping(smallTableValueMapping); projectionMapping.finalize(); @@ -267,7 +525,9 @@ public static VectorMapJoinDesc createVectorMapJoinDesc(MapJoinTestDescription t vectorMapJoinInfo.setProjectionMapping(projectionMapping); - assert projectionMapping.getCount() == testDesc.outputColumnNames.length; + if (projectionMapping.getCount() != testDesc.outputColumnNames.length) { + throw new RuntimeException("Projection mapping count " + projectionMapping.getCount() + " doesn't match output column count " + testDesc.outputColumnNames.length); + } vectorDesc.setVectorMapJoinInfo(vectorMapJoinInfo); @@ -306,6 +566,11 @@ public static VectorMapJoinCommonOperator createNativeVectorMapJoinOperator( new VectorMapJoinOuterLongOperator(new CompilationOpContext(), mapJoinDesc, vContext, vectorDesc); break; + case FULL_OUTER: + operator = + new VectorMapJoinFullOuterLongOperator(new CompilationOpContext(), + mapJoinDesc, vContext, vectorDesc); + break; default: throw new RuntimeException("unknown operator variation " + VectorMapJoinVariation); } @@ -331,6 +596,11 @@ public static VectorMapJoinCommonOperator createNativeVectorMapJoinOperator( operator = new VectorMapJoinOuterStringOperator(new CompilationOpContext(), mapJoinDesc, vContext, vectorDesc); + break; + case FULL_OUTER: + operator = + new VectorMapJoinFullOuterStringOperator(new CompilationOpContext(), + mapJoinDesc, vContext, vectorDesc); break; default: throw new RuntimeException("unknown operator variation " + VectorMapJoinVariation); @@ -358,6 +627,11 @@ public static VectorMapJoinCommonOperator createNativeVectorMapJoinOperator( operator = new VectorMapJoinOuterMultiKeyOperator(new CompilationOpContext(), mapJoinDesc, vContext, vectorDesc); break; + case FULL_OUTER: + operator = + new VectorMapJoinFullOuterMultiKeyOperator(new CompilationOpContext(), + mapJoinDesc, vContext, vectorDesc); + break; default: throw new RuntimeException("unknown operator variation " + VectorMapJoinVariation); } @@ -365,16 +639,31 @@ default: throw new RuntimeException("Unknown hash table key type " + vectorDesc.getHashTableKeyType()); } + System.out.println("*BENCHMARK* createNativeVectorMapJoinOperator " + + operator.getClass().getSimpleName()); return operator; } public static VectorizationContext createVectorizationContext(MapJoinTestDescription testDesc) throws HiveException { VectorizationContext vContext = - new VectorizationContext("test", testDesc.bigTableColumnNamesList); + new VectorizationContext("test", testDesc.bigTableColumnNameList); + + boolean 
+        (testDesc.vectorMapJoinVariation == VectorMapJoinVariation.OUTER ||
+        testDesc.vectorMapJoinVariation == VectorMapJoinVariation.FULL_OUTER);
+
+    if (isOuterJoin) {
+
+      // We need physical columns.
+      for (int i = 0; i < testDesc.smallTableRetainKeyColumnNums.length; i++) {
+        final int smallTableKeyRetainColumnNum = testDesc.smallTableRetainKeyColumnNums[i];
+        vContext.allocateScratchColumn(testDesc.smallTableKeyTypeInfos[smallTableKeyRetainColumnNum]);
+      }
+    }

     // Create scratch columns to hold small table results.
-    for (int i = 0; i < testDesc.smallTableValueTypeInfos.length; i++) {
+    for (int i = 0; i < testDesc.smallTableRetainValueColumnNums.length; i++) {
       vContext.allocateScratchColumn(testDesc.smallTableValueTypeInfos[i]);
     }
     return vContext;
@@ -390,19 +679,19 @@ public static MapJoinTableContainerSerDe createMapJoinTableContainerSerDe(MapJoi
     final Byte smallTablePos = 1;

-    // UNDONE: Why do we need to specify BinarySortableSerDe explicitly here???
     TableDesc keyTableDesc = mapJoinDesc.getKeyTblDesc();
     AbstractSerDe keySerializer =
         (AbstractSerDe) ReflectionUtil.newInstance(
             BinarySortableSerDe.class, null);
     SerDeUtils.initializeSerDe(keySerializer, null, keyTableDesc.getProperties(), null);
     MapJoinObjectSerDeContext keyContext =
         new MapJoinObjectSerDeContext(keySerializer, false);

-    TableDesc valueTableDesc;
+    final List<TableDesc> valueTableDescList;
     if (mapJoinDesc.getNoOuterJoin()) {
-      valueTableDesc = mapJoinDesc.getValueTblDescs().get(smallTablePos);
+      valueTableDescList = mapJoinDesc.getValueTblDescs();
     } else {
-      valueTableDesc = mapJoinDesc.getValueFilteredTblDescs().get(smallTablePos);
+      valueTableDescList = mapJoinDesc.getValueFilteredTblDescs();
     }
+    TableDesc valueTableDesc = valueTableDescList.get(smallTablePos);
     AbstractSerDe valueSerDe =
         (AbstractSerDe) ReflectionUtil.newInstance(
             valueTableDesc.getDeserializerClass(), null);
     SerDeUtils.initializeSerDe(valueSerDe, null, valueTableDesc.getProperties(), null);
@@ -414,16 +703,19 @@ public static MapJoinTableContainerSerDe createMapJoinTableContainerSerDe(MapJoi
   }

   public static void connectOperators(
-      MapJoinTestDescription testDesc, Operator operator,
-      Operator testCollectorOperator) throws HiveException {
-    Operator[] parents = new Operator[] {operator};
-    testCollectorOperator.setParentOperators(Arrays.asList(parents));
-    Operator[] childOperators = new Operator[] {testCollectorOperator};
-    operator.setChildOperators(Arrays.asList(childOperators));
-    HiveConf.setBoolVar(testDesc.hiveConf,
-        HiveConf.ConfVars.HIVE_MAPJOIN_TESTING_NO_HASH_TABLE_LOAD, true);
-    operator.initialize(testDesc.hiveConf, testDesc.inputObjectInspectors);
+      Operator<? extends OperatorDesc> operator,
+      Operator<? extends OperatorDesc> childOperator) throws HiveException {
+
+    List<Operator<? extends OperatorDesc>> newParentOperators = newOperatorList();
+    newParentOperators.addAll(childOperator.getParentOperators());
+    newParentOperators.add(operator);
+    childOperator.setParentOperators(newParentOperators);
+
+    List<Operator<? extends OperatorDesc>> newChildOperators = newOperatorList();
+    newChildOperators.addAll(operator.getChildOperators());
+    newChildOperators.add(childOperator);
+    operator.setChildOperators(newChildOperators);
+
   }

   private static List<Integer> intArrayToList(int[] intArray) {
@@ -509,9 +801,25 @@ private static void loadTableContainerData(MapJoinTestDescription testDesc, MapJ
     mapJoinTableContainer.seal();
   }

-  public static MapJoinOperator createMapJoin(MapJoinTestDescription testDesc,
-      Operator collectorOperator, MapJoinTestData testData,
-      MapJoinDesc mapJoinDesc, boolean isVectorMapJoin, boolean isOriginalMapJoin)
+  public static class CreateMapJoinResult {
+    public final MapJoinOperator mapJoinOperator;
+    public final MapJoinTableContainer mapJoinTableContainer;
+    public final MapJoinTableContainerSerDe mapJoinTableContainerSerDe;
+
+    public CreateMapJoinResult(
+        MapJoinOperator mapJoinOperator,
+        MapJoinTableContainer mapJoinTableContainer,
+        MapJoinTableContainerSerDe mapJoinTableContainerSerDe) {
+      this.mapJoinOperator = mapJoinOperator;
+      this.mapJoinTableContainer = mapJoinTableContainer;
+      this.mapJoinTableContainerSerDe = mapJoinTableContainerSerDe;
+    }
+  }
+
+  public static CreateMapJoinResult createMapJoin(
+      MapJoinTestDescription testDesc,
+      MapJoinTestData testData,
+      MapJoinDesc mapJoinDesc, boolean isVectorMapJoin, boolean isOriginalMapJoin,
+      MapJoinTableContainer shareMapJoinTableContainer)
           throws SerDeException, IOException, HiveException {

     final Byte bigTablePos = 0;
@@ -539,11 +847,16 @@ public static MapJoinOperator createMapJoin(MapJoinTestDescription testDesc,
       operator = new MapJoinOperator(new CompilationOpContext());
       operator.setConf(mapJoinDesc);
     } else {
-      VectorizationContext vContext = new VectorizationContext("test", testDesc.bigTableColumnNamesList);
+      VectorizationContext vContext =
+          new VectorizationContext("test", testDesc.bigTableColumnNameList);
+
+      /*
+      // UNDONE: Unclear whether this belongs in the input VectorizationContext...
       // Create scratch columns to hold small table results.
       for (int i = 0; i < testDesc.smallTableValueTypeInfos.length; i++) {
         vContext.allocateScratchColumn(testDesc.smallTableValueTypeInfos[i]);
       }
+      */

       // This is what the Vectorizer class does.
       VectorMapJoinDesc vectorMapJoinDesc = new VectorMapJoinDesc();
@@ -571,21 +884,20 @@ public static MapJoinOperator createMapJoin(MapJoinTestDescription testDesc,
       }
     }

-    MapJoinTestConfig.connectOperators(testDesc, operator, collectorOperator);
-
-    operator.setTestMapJoinTableContainer(1, mapJoinTableContainer, mapJoinTableContainerSerDe);
+    HiveConf.setBoolVar(testDesc.hiveConf,
+        HiveConf.ConfVars.HIVE_MAPJOIN_TESTING_NO_HASH_TABLE_LOAD, true);

-    return operator;
+    return new CreateMapJoinResult(operator, mapJoinTableContainer, mapJoinTableContainerSerDe);
   }

-  public static MapJoinOperator createNativeVectorMapJoin(MapJoinTestDescription testDesc,
-      Operator collectorOperator, MapJoinTestData testData,
-      MapJoinDesc mapJoinDesc, HashTableImplementationType hashTableImplementationType)
+  public static CreateMapJoinResult createNativeVectorMapJoin(
+      MapJoinTestDescription testDesc,
+      MapJoinTestData testData,
+      MapJoinDesc mapJoinDesc, HashTableImplementationType hashTableImplementationType,
+      MapJoinTableContainer shareMapJoinTableContainer)
           throws SerDeException, IOException, HiveException {

     VectorMapJoinDesc vectorDesc = MapJoinTestConfig.createVectorMapJoinDesc(testDesc);
-
-    // UNDONE
     mapJoinDesc.setVectorDesc(vectorDesc);

     vectorDesc.setHashTableImplementationType(hashTableImplementationType);
@@ -593,13 +905,14 @@ public static MapJoinOperator createNativeVectorMapJoin(MapJoinTestDescription t
     VectorMapJoinInfo vectorMapJoinInfo = vectorDesc.getVectorMapJoinInfo();

     MapJoinTableContainer mapJoinTableContainer;
+    MapJoinTableContainerSerDe mapJoinTableContainerSerDe = null;
     switch (vectorDesc.getHashTableImplementationType()) {
     case OPTIMIZED:
       mapJoinTableContainer =
           new MapJoinBytesTableContainer(
               testDesc.hiveConf, null, testData.smallTableKeyHashMap.size(), 0);

-      MapJoinTableContainerSerDe mapJoinTableContainerSerDe =
+      mapJoinTableContainerSerDe =
           MapJoinTestConfig.createMapJoinTableContainerSerDe(mapJoinDesc);

       mapJoinTableContainer.setSerde(
@@
-615,7 +928,11 @@ public static MapJoinOperator createNativeVectorMapJoin(MapJoinTestDescription t throw new RuntimeException("Unexpected hash table implementation type " + vectorDesc.getHashTableImplementationType()); } - loadTableContainerData(testDesc, testData, mapJoinTableContainer); +// if (shareMapJoinTableContainer == null) { + loadTableContainerData(testDesc, testData, mapJoinTableContainer); +// } else { +// setTableContainerData(mapJoinTableContainer, shareMapJoinTableContainer); +// } VectorizationContext vContext = MapJoinTestConfig.createVectorizationContext(testDesc); @@ -636,56 +953,295 @@ public static MapJoinOperator createNativeVectorMapJoin(MapJoinTestDescription t vectorDesc, vContext); - MapJoinTestConfig.connectOperators(testDesc, operator, collectorOperator); + HiveConf.setBoolVar(testDesc.hiveConf, + HiveConf.ConfVars.HIVE_MAPJOIN_TESTING_NO_HASH_TABLE_LOAD, true); - operator.setTestMapJoinTableContainer(1, mapJoinTableContainer, null); + return new CreateMapJoinResult(operator, mapJoinTableContainer, mapJoinTableContainerSerDe); + } - return operator; + public static CreateMapJoinResult createMapJoinImplementation( + MapJoinTestImplementation mapJoinImplementation, + MapJoinTestDescription testDesc, + MapJoinTestData testData, + MapJoinDesc mapJoinDesc) + throws SerDeException, IOException, HiveException { + return createMapJoinImplementation( + mapJoinImplementation, testDesc, testData, mapJoinDesc, null); } - public static MapJoinOperator createMapJoinImplementation(MapJoinTestImplementation mapJoinImplementation, + public static CreateMapJoinResult createMapJoinImplementation( + MapJoinTestImplementation mapJoinImplementation, MapJoinTestDescription testDesc, - Operator testCollectorOperator, MapJoinTestData testData, - MapJoinDesc mapJoinDesc) throws SerDeException, IOException, HiveException { + MapJoinTestData testData, + MapJoinDesc mapJoinDesc, + MapJoinTableContainer shareMapJoinTableContainer) + throws SerDeException, IOException, HiveException { - MapJoinOperator operator; + CreateMapJoinResult result; switch (mapJoinImplementation) { case ROW_MODE_HASH_MAP: // MapJoinOperator - operator = MapJoinTestConfig.createMapJoin( - testDesc, testCollectorOperator, testData, mapJoinDesc, /* isVectorMapJoin */ false, - /* isOriginalMapJoin */ true); + result = MapJoinTestConfig.createMapJoin( + testDesc, testData, mapJoinDesc, /* isVectorMapJoin */ false, + /* isOriginalMapJoin */ true, + shareMapJoinTableContainer); break; case ROW_MODE_OPTIMIZED: // MapJoinOperator - operator = MapJoinTestConfig.createMapJoin( - testDesc, testCollectorOperator, testData, mapJoinDesc, /* isVectorMapJoin */ false, - /* isOriginalMapJoin */ false); + result = MapJoinTestConfig.createMapJoin( + testDesc, testData, mapJoinDesc, /* isVectorMapJoin */ false, + /* isOriginalMapJoin */ false, + shareMapJoinTableContainer); break; case VECTOR_PASS_THROUGH: // VectorMapJoinOperator - operator = MapJoinTestConfig.createMapJoin( - testDesc, testCollectorOperator, testData, mapJoinDesc, /* isVectorMapJoin */ true, - /* n/a */ false); + result = MapJoinTestConfig.createMapJoin( + testDesc, testData, mapJoinDesc, /* isVectorMapJoin */ true, + /* n/a */ false, + shareMapJoinTableContainer); break; case NATIVE_VECTOR_OPTIMIZED: - operator = MapJoinTestConfig.createNativeVectorMapJoin( - testDesc, testCollectorOperator, testData, mapJoinDesc, HashTableImplementationType.OPTIMIZED); + result = MapJoinTestConfig.createNativeVectorMapJoin( + testDesc, testData, mapJoinDesc, + 
HashTableImplementationType.OPTIMIZED, + shareMapJoinTableContainer); break; case NATIVE_VECTOR_FAST: - operator = MapJoinTestConfig.createNativeVectorMapJoin( - testDesc, testCollectorOperator, testData, mapJoinDesc, HashTableImplementationType.FAST); + result = MapJoinTestConfig.createNativeVectorMapJoin( + testDesc, testData, mapJoinDesc, + HashTableImplementationType.FAST, + shareMapJoinTableContainer); break; default: throw new RuntimeException("Unexpected MapJoin Operator Implementation " + mapJoinImplementation); } - return operator; + return result; + } + + private static Operator makeInterceptSelectOperator( + MapJoinOperator mapJoinOperator, int bigTableKeySize, int bigTableRetainSize, + String[] outputColumnNames, TypeInfo[] outputTypeInfos) { + + MapJoinDesc mapJoinDesc = (MapJoinDesc) mapJoinOperator.getConf(); + + List selectExprList = new ArrayList(); + List selectOutputColumnNameList = new ArrayList(); + for (int i = 0; i < bigTableRetainSize; i++) { + String selectOutputColumnName = HiveConf.getColumnInternalName(i); + selectOutputColumnNameList.add(selectOutputColumnName); + + TypeInfo outputTypeInfo = outputTypeInfos[i]; + if (i < bigTableKeySize) { + + // Big Table key. + ExprNodeColumnDesc keyColumnExpr = + new ExprNodeColumnDesc( + outputTypeInfo, + outputColumnNames[i], "test", false); + selectExprList.add(keyColumnExpr); + } else { + + // For row-mode, substitute NULL constant for any non-key extra Big Table columns. + ExprNodeConstantDesc nullExtraColumnExpr = + new ExprNodeConstantDesc( + outputTypeInfo, + null); + nullExtraColumnExpr.setFoldedFromCol(outputColumnNames[i]); + selectExprList.add(nullExtraColumnExpr); + } + } + + SelectDesc selectDesc = new SelectDesc(selectExprList, selectOutputColumnNameList); + Operator selectOperator = + OperatorFactory.get(new CompilationOpContext(), selectDesc); + + return selectOperator; + } + + private static Operator vectorizeInterceptSelectOperator( + MapJoinOperator mapJoinOperator, int bigTableKeySize, int bigTableRetainSize, + Operator selectOperator) throws HiveException{ + + MapJoinDesc mapJoinDesc = (MapJoinDesc) mapJoinOperator.getConf(); + + VectorizationContext vOutContext = + ((VectorizationContextRegion) mapJoinOperator).getOutputVectorizationContext(); + + SelectDesc selectDesc = (SelectDesc) selectOperator.getConf(); + List selectExprs = selectDesc.getColList(); + + VectorExpression[] selectVectorExpr = new VectorExpression[bigTableRetainSize]; + for (int i = 0; i < bigTableRetainSize; i++) { + + TypeInfo typeInfo = selectExprs.get(i).getTypeInfo(); + if (i < bigTableKeySize) { + + // Big Table key. + selectVectorExpr[i] = vOutContext.getVectorExpression(selectExprs.get(i)); + } else { + + // For vector-mode, for test purposes we substitute a NO-OP (we don't want to modify + // the batch). + + // FULL OUTER INTERCEPT does not look at non-key columns. + + NoOpExpression noOpExpression = new NoOpExpression(i); + + noOpExpression.setInputTypeInfos(typeInfo); + noOpExpression.setInputDataTypePhysicalVariations(DataTypePhysicalVariation.NONE); + + noOpExpression.setOutputTypeInfo(typeInfo); + noOpExpression.setOutputDataTypePhysicalVariation(DataTypePhysicalVariation.NONE); + + selectVectorExpr[i] = noOpExpression; + } + } + + System.out.println("*BENCHMARK* VectorSelectOperator selectVectorExpr " + + Arrays.toString(selectVectorExpr)); + + int[] projectedColumns = + ArrayUtils.toPrimitive( + vOutContext.getProjectedColumns().subList(0, bigTableRetainSize). 
+                toArray(new Integer[0]));
+    System.out.println("*BENCHMARK* VectorSelectOperator projectedColumns " +
+        Arrays.toString(projectedColumns));
+
+    VectorSelectDesc vectorSelectDesc = new VectorSelectDesc();
+    vectorSelectDesc.setSelectExpressions(selectVectorExpr);
+    vectorSelectDesc.setProjectedOutputColumns(projectedColumns);
+
+    Operator vectorSelectOperator = OperatorFactory.getVectorOperator(
+        selectOperator.getCompilationOpContext(), selectDesc,
+        vOutContext, vectorSelectDesc);
+
+    return vectorSelectOperator;
+  }
+
+  public static CountCollectorTestOperator addFullOuterIntercept(
+      MapJoinTestImplementation mapJoinImplementation,
+      MapJoinTestDescription testDesc,
+      RowTestObjectsMultiSet outputTestRowMultiSet, MapJoinTestData testData,
+      MapJoinOperator mapJoinOperator, MapJoinTableContainer mapJoinTableContainer,
+      MapJoinTableContainerSerDe mapJoinTableContainerSerDe)
+          throws SerDeException, IOException, HiveException {
+
+    MapJoinDesc mapJoinDesc = (MapJoinDesc) mapJoinOperator.getConf();
+
+    // For FULL OUTER MapJoin, we require all Big Table keys to be present in the output result.
+    // The first N output columns are the Big Table key columns.
+    Map<Byte, List<ExprNodeDesc>> keyMap = mapJoinDesc.getKeys();
+    List<ExprNodeDesc> bigTableKeyExprs = keyMap.get((byte) 0);
+    final int bigTableKeySize = bigTableKeyExprs.size();
+
+    Map<Byte, List<Integer>> retainMap = mapJoinDesc.getRetainList();
+    List<Integer> bigTableRetainList = retainMap.get((byte) 0);
+    final int bigTableRetainSize = bigTableRetainList.size();
+
+    List<String> outputColumnNameList = mapJoinDesc.getOutputColumnNames();
+    String[] mapJoinOutputColumnNames = outputColumnNameList.toArray(new String[0]);
+
+    // Use a utility method to get the MapJoin output TypeInfo.
+    TypeInfo[] mapJoinOutputTypeInfos = VectorMapJoinBaseOperator.getOutputTypeInfos(mapJoinDesc);
+
+    final boolean isVectorOutput = MapJoinTestConfig.isVectorOutput(mapJoinImplementation);
+
+    /*
+     * Always create a row-mode SelectOperator. If we are vector-mode, next we will use its
+     * expressions and replace it with a VectorSelectOperator.
+     */
+    Operator selectOperator =
+        makeInterceptSelectOperator(
+            mapJoinOperator, bigTableKeySize, bigTableRetainSize,
+            mapJoinOutputColumnNames, mapJoinOutputTypeInfos);
+
+    List<String> selectOutputColumnNameList =
+        ((SelectDesc) selectOperator.getConf()).getOutputColumnNames();
+    String[] selectOutputColumnNames =
+        selectOutputColumnNameList.toArray(new String[0]);
+
+    if (isVectorOutput) {
+      selectOperator =
+          vectorizeInterceptSelectOperator(
+              mapJoinOperator, bigTableKeySize, bigTableRetainSize, selectOperator);
+    }
+
+    /*
+     * Create a test description just for the FULL OUTER INTERCEPT, with a different Big Table
+     * shape: the SELECT operator's output column names and types.
+     */
+    MapJoinTestDescription interceptTestDesc =
+        new MapJoinTestDescription(
+            testDesc.hiveConf, testDesc.vectorMapJoinVariation,
+            selectOutputColumnNames,
+            Arrays.copyOf(mapJoinOutputTypeInfos, bigTableRetainSize),
+            testDesc.bigTableKeyColumnNums,
+            testDesc.smallTableValueTypeInfos,
+            testDesc.smallTableRetainKeyColumnNums,
+            testDesc.smallTableGenerationParameters,
+            testDesc.mapJoinPlanVariation);
+
+    MapJoinDesc intersectMapJoinDesc =
+        createMapJoinDesc(interceptTestDesc, /* isFullOuterIntersect */ true);
+
+    /*
+     * Create the FULL OUTER INTERSECT MapJoin operator.
+ */ + CreateMapJoinResult interceptCreateMapJoinResult = + createMapJoinImplementation( + mapJoinImplementation, interceptTestDesc, testData, intersectMapJoinDesc); + MapJoinOperator intersectMapJoinOperator = + interceptCreateMapJoinResult.mapJoinOperator; + MapJoinTableContainer intersectMapJoinTableContainer = + interceptCreateMapJoinResult.mapJoinTableContainer; + MapJoinTableContainerSerDe interceptMapJoinTableContainerSerDe = + interceptCreateMapJoinResult.mapJoinTableContainerSerDe; + + connectOperators(mapJoinOperator, selectOperator); + + connectOperators(selectOperator, intersectMapJoinOperator); + + CountCollectorTestOperator interceptTestCollectorOperator; + if (!isVectorOutput) { + interceptTestCollectorOperator = + new TestMultiSetCollectorOperator( + interceptTestDesc.outputObjectInspectors, outputTestRowMultiSet); + } else { + VectorizationContext vContext = + ((VectorizationContextRegion) intersectMapJoinOperator).getOutputVectorizationContext(); + int[] intersectProjectionColumns = + ArrayUtils.toPrimitive(vContext.getProjectedColumns().toArray(new Integer[0])); + interceptTestCollectorOperator = + new TestMultiSetVectorCollectorOperator( + intersectProjectionColumns, + interceptTestDesc.outputTypeInfos, + interceptTestDesc.outputObjectInspectors, outputTestRowMultiSet); + } + + connectOperators(intersectMapJoinOperator, interceptTestCollectorOperator); + + // Setup the FULL OUTER INTERSECT MapJoin's inputObjInspector to include the Small Table, etc. + intersectMapJoinOperator.setInputObjInspectors(interceptTestDesc.inputObjectInspectors); + + // Now, invoke initializeOp methods from the root MapJoin operator. + mapJoinOperator.initialize(testDesc.hiveConf, testDesc.inputObjectInspectors); + + // Fixup the mapJoinTables container references to our test data. 
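+    // (HIVE_MAPJOIN_TESTING_NO_HASH_TABLE_LOAD was set when the operators were created,
+    // so the containers built above are injected directly here instead of going through
+    // the normal hash table load path.)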
+    mapJoinOperator.setTestMapJoinTableContainer(
+        1, mapJoinTableContainer, mapJoinTableContainerSerDe);
+    intersectMapJoinOperator.setTestMapJoinTableContainer(
+        1, intersectMapJoinTableContainer, interceptMapJoinTableContainerSerDe);
+
+    return interceptTestCollectorOperator;
+  }
+
+  private static List<Operator<? extends OperatorDesc>> newOperatorList() {
+    return new ArrayList<Operator<? extends OperatorDesc>>();
+  }
 }
\ No newline at end of file
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/MapJoinTestData.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/MapJoinTestData.java
index d763695d3b..2f5479e127 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/MapJoinTestData.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/MapJoinTestData.java
@@ -20,6 +20,7 @@
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Random;
 import java.util.Map.Entry;
@@ -27,16 +28,15 @@
 import org.apache.hadoop.hive.ql.exec.util.rowobjects.RowTestObjects;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow;
+import org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource;
 import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource;
+import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator;
-import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType;
-import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType.GenerateCategory;
-import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerateStream;
 import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestDescription.SmallTableGenerationParameters;
 import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestDescription.SmallTableGenerationParameters.ValueOption;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.VectorMapJoinVariation;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
@@ -44,107 +44,170 @@
 public class MapJoinTestData {

-  final long bigTableRandomSeed;
-  final long smallTableRandomSeed;
+  final Random random;

-  final GenerateType[] generateTypes;
-  final VectorBatchGenerator generator;
+  final List<GenerationSpec> generationSpecList;
+  final VectorRandomRowSource bigTableRowSource;
+  final Object[][] bigTableRandomRows;
+  final VectorRandomBatchSource bigTableBatchSource;

   public final VectorizedRowBatch bigTableBatch;
-  public final VectorBatchGenerateStream bigTableBatchStream;
-
   final SmallTableGenerationParameters smallTableGenerationParameters;

   HashMap<RowTestObjects, Integer> smallTableKeyHashMap;
+  List<RowTestObjects> fullOuterAdditionalSmallTableKeys;
+
   ArrayList<Integer> smallTableValueCounts;
   ArrayList<ArrayList<RowTestObjects>> smallTableValues;

   public MapJoinTestData(int rowCount, MapJoinTestDescription testDesc,
-      long bigTableRandomSeed, long smallTableRandomSeed) throws HiveException {
+      long randomSeed) throws HiveException {
+
+    random = new Random(randomSeed);
+
+    boolean isOuterJoin =
+        (testDesc.vectorMapJoinVariation == VectorMapJoinVariation.OUTER ||
+        testDesc.vectorMapJoinVariation == VectorMapJoinVariation.FULL_OUTER);
+
+
generationSpecList = generationSpecListFromTypeInfos( + testDesc.bigTableTypeInfos, + testDesc.bigTableKeyColumnNums.length, + isOuterJoin); - this.bigTableRandomSeed = bigTableRandomSeed; + bigTableRowSource = new VectorRandomRowSource(); - this.smallTableRandomSeed = smallTableRandomSeed; + bigTableRowSource.initGenerationSpecSchema( + random, generationSpecList, /* maxComplexDepth */ 0, + /* allowNull */ true, /* isUnicodeOk */ true, null); - generateTypes = generateTypesFromTypeInfos(testDesc.bigTableTypeInfos); - generator = new VectorBatchGenerator(generateTypes); + // UNDONE: 100000 + bigTableRandomRows = bigTableRowSource.randomRows(10); - bigTableBatch = generator.createBatch(); + bigTableBatchSource = + VectorRandomBatchSource.createInterestingBatches( + random, + bigTableRowSource, + bigTableRandomRows, + null); + + bigTableBatch = createBigTableBatch(testDesc); // Add small table result columns. - ColumnVector[] newCols = new ColumnVector[bigTableBatch.cols.length + testDesc.smallTableValueTypeInfos.length]; + + // Only [FULL] OUTER MapJoin needs a physical column. + final int smallTableRetainKeySize = + (isOuterJoin ? testDesc.smallTableRetainKeyColumnNums.length : 0); + ColumnVector[] newCols = + new ColumnVector[ + bigTableBatch.cols.length + + smallTableRetainKeySize + + testDesc.smallTableValueTypeInfos.length]; System.arraycopy(bigTableBatch.cols, 0, newCols, 0, bigTableBatch.cols.length); + int colIndex = bigTableBatch.cols.length; + + if (isOuterJoin) { + for (int s = 0; s < smallTableRetainKeySize; s++) { + final int smallTableKeyColumnNum = testDesc.smallTableRetainKeyColumnNums[s]; + newCols[colIndex++] = + VectorizedBatchUtil.createColumnVector( + testDesc.smallTableKeyTypeInfos[smallTableKeyColumnNum]); + } + } for (int s = 0; s < testDesc.smallTableValueTypeInfos.length; s++) { - newCols[bigTableBatch.cols.length + s] = + newCols[colIndex++] = VectorizedBatchUtil.createColumnVector(testDesc.smallTableValueTypeInfos[s]); } bigTableBatch.cols = newCols; bigTableBatch.numCols = newCols.length; - - // This stream will be restarted with the same random seed over and over. - bigTableBatchStream = new VectorBatchGenerateStream( - bigTableRandomSeed, generator, rowCount); - VectorExtractRow vectorExtractRow = new VectorExtractRow(); - vectorExtractRow.init(testDesc.bigTableKeyTypeInfos); + VectorExtractRow keyVectorExtractRow = new VectorExtractRow(); + keyVectorExtractRow.init(testDesc.bigTableKeyTypeInfos, testDesc.bigTableKeyColumnNums); smallTableGenerationParameters = testDesc.getSmallTableGenerationParameters(); + HashMap bigTableKeyHashMap = new HashMap(); smallTableKeyHashMap = new HashMap(); - Random smallTableRandom = new Random(smallTableRandomSeed); - // Start small table random generation - // from beginning. 
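+    // Small Table keys are harvested from the Big Table batches below: every key row is
+    // remembered in bigTableKeyHashMap, and roughly keyOutOfAThousand out of every 1000
+    // rows are also added to smallTableKeyHashMap (ONLY_ONE de-duplicates). With
+    // NO_REGULAR_SMALL_KEYS that sampling is skipped, so the Small Table will contain
+    // only the extra FULL OUTER keys and the NULL key generated further down.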
ValueOption valueOption = smallTableGenerationParameters.getValueOption(); - int keyOutOfAThousand = smallTableGenerationParameters.getKeyOutOfAThousand(); - - bigTableBatchStream.reset(); - while (bigTableBatchStream.isNext()) { - bigTableBatch.reset(); - bigTableBatchStream.fillNext(bigTableBatch); + if (valueOption != ValueOption.NO_REGULAR_SMALL_KEYS) { + int keyOutOfAThousand = smallTableGenerationParameters.getKeyOutOfAThousand(); - final int size = bigTableBatch.size; - for (int i = 0; i < size; i++) { + bigTableBatchSource.resetBatchIteration(); + while (bigTableBatchSource.fillNextBatch(bigTableBatch)) { - if (smallTableRandom.nextInt(1000) <= keyOutOfAThousand) { + final int size = bigTableBatch.size; + for (int logical = 0; logical < size; logical++) { + final int batchIndex = + (bigTableBatch.selectedInUse ? bigTableBatch.selected[logical] : logical); - RowTestObjects testKey = getTestKey(bigTableBatch, i, vectorExtractRow, + RowTestObjects testKey = getTestKey(bigTableBatch, batchIndex, keyVectorExtractRow, testDesc.bigTableKeyTypeInfos.length, testDesc.bigTableObjectInspectors); + bigTableKeyHashMap.put((RowTestObjects) testKey.clone(), -1); - if (valueOption == ValueOption.ONLY_ONE) { - if (smallTableKeyHashMap.containsKey(testKey)) { - continue; + if (random.nextInt(1000) <= keyOutOfAThousand) { + + if (valueOption == ValueOption.ONLY_ONE) { + if (smallTableKeyHashMap.containsKey(testKey)) { + continue; + } } + smallTableKeyHashMap.put((RowTestObjects) testKey.clone(), -1); } - smallTableKeyHashMap.put((RowTestObjects) testKey.clone(), -1); } } } //--------------------------------------------------------------------------------------------- - // UNDONE: For now, don't add more small keys... - /* - // Add more small table keys that are not in Big Table batches. - final int smallTableAdditionalLength = 1 + random.nextInt(4); - final int smallTableAdditionalSize = smallTableAdditionalLength * maxBatchSize; - VectorizedRowBatch[] smallTableAdditionalBatches = createBigTableBatches(generator, smallTableAdditionalLength); - for (int i = 0; i < smallTableAdditionalLength; i++) { - generator.generateBatch(smallTableAdditionalBatches[i], random, maxBatchSize); + // Add more small table keys that are not in Big Table or Small Table for FULL OUTER. + + fullOuterAdditionalSmallTableKeys = new ArrayList(); + + VectorRandomRowSource altBigTableRowSource = new VectorRandomRowSource(); + + altBigTableRowSource.initGenerationSpecSchema( + random, generationSpecList, /* maxComplexDepth */ 0, + /* allowNull */ true, /* isUnicodeOk */ true, null); + + Object[][] altBigTableRandomRows = altBigTableRowSource.randomRows(10000); + + VectorRandomBatchSource altBigTableBatchSource = + VectorRandomBatchSource.createInterestingBatches( + random, + altBigTableRowSource, + altBigTableRandomRows, + null); + + altBigTableBatchSource.resetBatchIteration(); + while (altBigTableBatchSource.fillNextBatch(bigTableBatch)) { + final int size = bigTableBatch.size; + for (int logical = 0; logical < size; logical++) { + final int batchIndex = + (bigTableBatch.selectedInUse ? 
bigTableBatch.selected[logical] : logical); + RowTestObjects testKey = getTestKey(bigTableBatch, batchIndex, keyVectorExtractRow, + testDesc.bigTableKeyTypeInfos.length, + testDesc.bigTableObjectInspectors); + if (bigTableKeyHashMap.containsKey(testKey) || + smallTableKeyHashMap.containsKey(testKey)) { + continue; + } + RowTestObjects testKeyClone = (RowTestObjects) testKey.clone(); + smallTableKeyHashMap.put(testKeyClone, -1); + fullOuterAdditionalSmallTableKeys.add(testKeyClone); + } } - TestRow[] additionalTestKeys = getTestKeys(smallTableAdditionalBatches, vectorExtractRow, - testDesc.bigTableKeyTypeInfos.length, testDesc.bigTableObjectInspectors); - final int smallTableAdditionKeyProbes = smallTableAdditionalSize / 2; - for (int i = 0; i < smallTableAdditionKeyProbes; i++) { - int index = random.nextInt(smallTableAdditionalSize); - TestRow additionalTestKey = additionalTestKeys[index]; - smallTableKeyHashMap.put((TestRow) additionalTestKey.clone(), -1); + + // Make sure there is a NULL key. + Object[] nullKeyRowObjects = new Object[testDesc.bigTableKeyTypeInfos.length]; + RowTestObjects nullTestKey = new RowTestObjects(nullKeyRowObjects); + if (!smallTableKeyHashMap.containsKey(nullTestKey)) { + smallTableKeyHashMap.put(nullTestKey, -1); + fullOuterAdditionalSmallTableKeys.add(nullTestKey); } - */ // Number the test rows with collection order. int addCount = 0; @@ -152,17 +215,28 @@ public MapJoinTestData(int rowCount, MapJoinTestDescription testDesc, testRowEntry.setValue(addCount++); } - generateVariationData(this, testDesc, smallTableRandom); + generateVariationData(this, testDesc, random); } - public VectorBatchGenerateStream getBigTableBatchStream() { - return bigTableBatchStream; + public VectorRandomBatchSource getBigTableBatchSource() { + return bigTableBatchSource; } public VectorizedRowBatch getBigTableBatch() { return bigTableBatch; } + public VectorizedRowBatch createBigTableBatch(MapJoinTestDescription testDesc) { + final int bigTableColumnCount = testDesc.bigTableTypeInfos.length; + VectorizedRowBatch batch = new VectorizedRowBatch(bigTableColumnCount); + for (int i = 0; i < bigTableColumnCount; i++) { + batch.cols[i] = + VectorizedBatchUtil.createColumnVector( + testDesc.bigTableTypeInfos[i]); + } + return batch; + } + private RowTestObjects getTestKey(VectorizedRowBatch bigTableBatch, int batchIndex, VectorExtractRow vectorExtractRow, int columnCount, ObjectInspector[] objectInspectors) { Object[] rowObjects = new Object[columnCount]; @@ -177,37 +251,29 @@ public static void driveBigTableData(MapJoinTestDescription testDesc, MapJoinTes MapJoinOperator operator) throws HiveException { VectorExtractRow vectorExtractRow = new VectorExtractRow(); - vectorExtractRow.init(testDesc.bigTableKeyTypeInfos); - - final int columnCount = testDesc.bigTableKeyTypeInfos.length; - Object[] row = new Object[columnCount]; + vectorExtractRow.init(testDesc.bigTableTypeInfos); - testData.bigTableBatchStream.reset(); - while (testData.bigTableBatchStream.isNext()) { - testData.bigTableBatch.reset(); - testData.bigTableBatchStream.fillNext(testData.bigTableBatch); - - // Extract rows and call process per row - final int size = testData.bigTableBatch.size; - for (int r = 0; r < size; r++) { - vectorExtractRow.extractRow(testData.bigTableBatch, r, row); - operator.process(row, 0); - } + Object[][] bigTableRandomRows = testData.bigTableRandomRows; + final int rowCount = bigTableRandomRows.length; + for (int i = 0; i < rowCount; i++) { + Object[] row = bigTableRandomRows[i]; + 
operator.process(row, 0); } - operator.closeOp(false); + + // Close the operator tree. + operator.close(false); } public static void driveVectorBigTableData(MapJoinTestDescription testDesc, MapJoinTestData testData, MapJoinOperator operator) throws HiveException { - testData.bigTableBatchStream.reset(); - while (testData.bigTableBatchStream.isNext()) { - testData.bigTableBatch.reset(); - testData.bigTableBatchStream.fillNext(testData.bigTableBatch); - + testData.bigTableBatchSource.resetBatchIteration(); + while (testData.bigTableBatchSource.fillNextBatch(testData.bigTableBatch)) { operator.process(testData.bigTableBatch, 0); } - operator.closeOp(false); + + // Close the operator tree. + operator.close(false); } public static void generateVariationData(MapJoinTestData testData, @@ -219,6 +285,7 @@ public static void generateVariationData(MapJoinTestData testData, break; case INNER: case OUTER: + case FULL_OUTER: testData.generateRandomSmallTableCounts(testDesc, random); testData.generateRandomSmallTableValues(testDesc, random); break; @@ -230,10 +297,15 @@ public static void generateVariationData(MapJoinTestData testData, private static RowTestObjects generateRandomSmallTableValueRow(MapJoinTestDescription testDesc, Random random) { final int columnCount = testDesc.smallTableValueTypeInfos.length; - Object[] smallTableValueRow = VectorRandomRowSource.randomWritablePrimitiveRow(columnCount, random, - testDesc.smallTableValuePrimitiveTypeInfos); + PrimitiveTypeInfo[] primitiveTypeInfos = new PrimitiveTypeInfo[columnCount]; + for (int i = 0; i < columnCount; i++) { + primitiveTypeInfos[i] = (PrimitiveTypeInfo) testDesc.smallTableValueTypeInfos[i]; + } + Object[] smallTableValueRow = + VectorRandomRowSource.randomWritablePrimitiveRow( + columnCount, random, primitiveTypeInfos); for (int c = 0; c < smallTableValueRow.length; c++) { - smallTableValueRow[c] = ((PrimitiveObjectInspector) testDesc.smallTableObjectInspectors[c]).copyObject(smallTableValueRow[c]); + smallTableValueRow[c] = ((PrimitiveObjectInspector) testDesc.smallTableValueObjectInspectors[c]).copyObject(smallTableValueRow[c]); } return new RowTestObjects(smallTableValueRow); } @@ -241,7 +313,7 @@ private static RowTestObjects generateRandomSmallTableValueRow(MapJoinTestDescri private void generateRandomSmallTableCounts(MapJoinTestDescription testDesc, Random random) { smallTableValueCounts = new ArrayList(); for (Entry testKeyEntry : smallTableKeyHashMap.entrySet()) { - final int valueCount = 1 + random.nextInt(19); + final int valueCount = 1 + random.nextInt(3); smallTableValueCounts.add(valueCount); } } @@ -258,15 +330,26 @@ private void generateRandomSmallTableValues(MapJoinTestDescription testDesc, Ran } } - private static GenerateType[] generateTypesFromTypeInfos(TypeInfo[] typeInfos) { + private static List generationSpecListFromTypeInfos(TypeInfo[] typeInfos, + int keyCount, boolean isOuterJoin) { + + List generationSpecList = new ArrayList(); + final int size = typeInfos.length; - GenerateType[] generateTypes = new GenerateType[size]; for (int i = 0; i < size; i++) { - PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfos[i]; - GenerateCategory category = - GenerateCategory.generateCategoryFromPrimitiveCategory(primitiveTypeInfo.getPrimitiveCategory()); - generateTypes[i] = new GenerateType(category); + TypeInfo typeInfo = typeInfos[i]; + final boolean columnAllowNulls; + if (i >= keyCount) { + + // Value columns can be NULL. 
+ columnAllowNulls = true; + } else { + + // Non-OUTER JOIN operators expect NULL keys to have been filtered out. + columnAllowNulls = isOuterJoin; + } + generationSpecList.add(GenerationSpec.createSameType(typeInfo, columnAllowNulls)); } - return generateTypes; + return generationSpecList; } } \ No newline at end of file diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/MapJoinTestDescription.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/MapJoinTestDescription.java index bde442491c..93fdb28eb5 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/MapJoinTestDescription.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/MapJoinTestDescription.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector.mapjoin; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -27,20 +28,23 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; public class MapJoinTestDescription extends DescriptionTest { + public static enum MapJoinPlanVariation { + DYNAMIC_PARTITION_HASH_JOIN, + SHARED_SMALL_TABLE + } + public static class SmallTableGenerationParameters { public static enum ValueOption { NO_RESTRICTION, ONLY_ONE, - ONLY_TWO, - AT_LEAST_TWO + NO_REGULAR_SMALL_KEYS } private ValueOption valueOption; @@ -82,70 +86,103 @@ public int getNoMatchKeyOutOfAThousand() { final VectorMapJoinVariation vectorMapJoinVariation; // Adjustable. - public String[] bigTableColumnNames; + public String[] bigTableKeyColumnNames; public TypeInfo[] bigTableTypeInfos; + public int[] bigTableKeyColumnNums; - public String[] smallTableValueColumnNames; + public TypeInfo[] smallTableValueTypeInfos; - public int[] bigTableRetainColumnNums; + public int[] smallTableRetainKeyColumnNums; - public int[] smallTableRetainValueColumnNums; public SmallTableGenerationParameters smallTableGenerationParameters; // Derived. 
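+  // (All of the fields below are filled in by computeDerived() from the adjustable fields
+  // above: internal column names, ObjectInspectors, the combined Small Table key+value
+  // view, and the expected output row shape used by the tests.)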
- public List bigTableColumnNamesList; - public String[] bigTableKeyColumnNames; - public TypeInfo[] bigTableKeyTypeInfos; - public List smallTableValueColumnNamesList; + + public int[] bigTableColumnNums; + public String[] bigTableColumnNames; + public List bigTableColumnNameList; public ObjectInspector[] bigTableObjectInspectors; - public List bigTableObjectInspectorsList; + public List bigTableObjectInspectorList; + + public TypeInfo[] bigTableKeyTypeInfos; + + public List smallTableKeyColumnNameList; + public String[] smallTableKeyColumnNames; + public TypeInfo[] smallTableKeyTypeInfos; + public ObjectInspector[] smallTableKeyObjectInspectors; + public List smallTableKeyObjectInspectorList; + + public List smallTableValueColumnNameList; + public String[] smallTableValueColumnNames; + public ObjectInspector[] smallTableValueObjectInspectors; + public List smallTableValueObjectInspectorList; + + public int[] bigTableRetainColumnNums; + public int[] smallTableRetainValueColumnNums; + + public String[] smallTableColumnNames; + public List smallTableColumnNameList; + public TypeInfo[] smallTableTypeInfos; + public List smallTableObjectInspectorList; + public StandardStructObjectInspector bigTableStandardObjectInspector; - public PrimitiveTypeInfo[] smallTableValuePrimitiveTypeInfos; - public ObjectInspector[] smallTableObjectInspectors; - public PrimitiveCategory[] smallTablePrimitiveCategories; - public List smallTableObjectInspectorsList; public StandardStructObjectInspector smallTableStandardObjectInspector; public ObjectInspector[] inputObjectInspectors; + public String[] outputColumnNames; public TypeInfo[] outputTypeInfos; public ObjectInspector[] outputObjectInspectors; + final MapJoinPlanVariation mapJoinPlanVariation; + + public MapJoinTestDescription ( + HiveConf hiveConf, + VectorMapJoinVariation vectorMapJoinVariation, + TypeInfo[] bigTableTypeInfos, + int[] bigTableKeyColumnNums, + TypeInfo[] smallTableValueTypeInfos, + int[] smallTableRetainKeyColumnNums, + SmallTableGenerationParameters smallTableGenerationParameters, + MapJoinPlanVariation mapJoinPlanVariation) { + this( + hiveConf, + vectorMapJoinVariation, + /* bigTableColumnNames */ null, + bigTableTypeInfos, + bigTableKeyColumnNums, + smallTableValueTypeInfos, + smallTableRetainKeyColumnNums, + smallTableGenerationParameters, + mapJoinPlanVariation); + } + public MapJoinTestDescription ( HiveConf hiveConf, VectorMapJoinVariation vectorMapJoinVariation, - String[] bigTableColumnNames, TypeInfo[] bigTableTypeInfos, + String[] bigTableColumnNames, + TypeInfo[] bigTableTypeInfos, int[] bigTableKeyColumnNums, - String[] smallTableValueColumnNames, TypeInfo[] smallTableValueTypeInfos, - int[] bigTableRetainColumnNums, - int[] smallTableRetainKeyColumnNums, int[] smallTableRetainValueColumnNums, - SmallTableGenerationParameters smallTableGenerationParameters) { + TypeInfo[] smallTableValueTypeInfos, + int[] smallTableRetainKeyColumnNums, + SmallTableGenerationParameters smallTableGenerationParameters, + MapJoinPlanVariation mapJoinPlanVariation) { super(hiveConf); + this.vectorMapJoinVariation = vectorMapJoinVariation; this.bigTableColumnNames = bigTableColumnNames; this.bigTableTypeInfos = bigTableTypeInfos; this.bigTableKeyColumnNums = bigTableKeyColumnNums; - this.smallTableValueColumnNames = smallTableValueColumnNames; + this.smallTableValueTypeInfos = smallTableValueTypeInfos; - this.bigTableRetainColumnNums = bigTableRetainColumnNums; - this.smallTableRetainKeyColumnNums = smallTableRetainKeyColumnNums; - 
this.smallTableRetainValueColumnNums = smallTableRetainValueColumnNums;
+
+    this.smallTableRetainKeyColumnNums = smallTableRetainKeyColumnNums;

     this.smallTableGenerationParameters = smallTableGenerationParameters;

-    switch (vectorMapJoinVariation) {
-    case INNER_BIG_ONLY:
-    case LEFT_SEMI:
-      trimAwaySmallTableValueInfo();
-      break;
-    case INNER:
-    case OUTER:
-      break;
-    default:
-      throw new RuntimeException("Unknown operator variation " + vectorMapJoinVariation);
-    }
+    this.mapJoinPlanVariation = mapJoinPlanVariation;

     computeDerived();
   }
@@ -155,45 +192,121 @@ public SmallTableGenerationParameters getSmallTableGenerationParameters() {
   }

   public void computeDerived() {
-    bigTableColumnNamesList = Arrays.asList(bigTableColumnNames);
-    bigTableKeyColumnNames = new String[bigTableKeyColumnNums.length];
-    bigTableKeyTypeInfos = new TypeInfo[bigTableKeyColumnNums.length];
-    for (int i = 0; i < bigTableKeyColumnNums.length; i++) {
-      bigTableKeyColumnNames[i] = bigTableColumnNames[bigTableKeyColumnNums[i]];
-      bigTableKeyTypeInfos[i] = bigTableTypeInfos[bigTableKeyColumnNums[i]];
+    final int bigTableSize = bigTableTypeInfos.length;
+
+    if (bigTableColumnNames == null) {
+
+      // Automatically populate.
+      bigTableColumnNames = new String[bigTableSize];
+      for (int i = 0; i < bigTableSize; i++) {
+        bigTableColumnNames[i] = HiveConf.getColumnInternalName(i);
+      }
     }
-    smallTableValueColumnNamesList = Arrays.asList(smallTableValueColumnNames);

+    // Automatically populate.
+    bigTableColumnNums = new int[bigTableSize];
-    bigTableObjectInspectors = new ObjectInspector[bigTableTypeInfos.length];
-    for (int i = 0; i < bigTableTypeInfos.length; i++) {
+    for (int i = 0; i < bigTableSize; i++) {
+      bigTableColumnNums[i] = i;
+    }
+
+    // Automatically populate.
+    bigTableRetainColumnNums = new int[bigTableSize];
+    for (int i = 0; i < bigTableSize; i++) {
+      bigTableRetainColumnNums[i] = i;
+    }
+
+    /*
+     * Big Table key information.
+     */
+    final int keySize = bigTableKeyColumnNums.length;
+
+    bigTableKeyColumnNames = new String[keySize];
+    bigTableKeyTypeInfos = new TypeInfo[keySize];
+    for (int i = 0; i < keySize; i++) {
+      final int bigTableKeyColumnNum = bigTableKeyColumnNums[i];
+      bigTableKeyColumnNames[i] = bigTableColumnNames[bigTableKeyColumnNum];
+      bigTableKeyTypeInfos[i] = bigTableTypeInfos[bigTableKeyColumnNum];
+    }
+
+    /*
+     * Big Table object inspectors.
+     */
+    bigTableObjectInspectors = new ObjectInspector[bigTableSize];
+    for (int i = 0; i < bigTableSize; i++) {
       bigTableObjectInspectors[i] =
-          PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector((PrimitiveTypeInfo) bigTableTypeInfos[i]);
+          PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
+              (PrimitiveTypeInfo) bigTableTypeInfos[i]);
+    }
+    bigTableColumnNameList = Arrays.asList(bigTableColumnNames);
+    bigTableObjectInspectorList = Arrays.asList(bigTableObjectInspectors);
+
+    /*
+     * Small Table key object inspectors are derived directly from the Big Table key information.
+ */ + smallTableKeyColumnNames = new String[keySize]; + smallTableKeyTypeInfos = Arrays.copyOf(bigTableKeyTypeInfos, keySize); + smallTableKeyObjectInspectors = new ObjectInspector[keySize]; + for (int i = 0; i < keySize; i++) { + smallTableKeyColumnNames[i] = HiveConf.getColumnInternalName(i); + final int bigTableKeyColumnNum = bigTableKeyColumnNums[i]; + smallTableKeyObjectInspectors[i] = bigTableObjectInspectors[bigTableKeyColumnNum]; + } + smallTableKeyColumnNameList = Arrays.asList(smallTableKeyColumnNames); + smallTableKeyObjectInspectorList = Arrays.asList(smallTableKeyObjectInspectors); + + // First part of Small Table information is the key information. + smallTableColumnNameList = new ArrayList(smallTableKeyColumnNameList); + List smallTableTypeInfoList = + new ArrayList(Arrays.asList(smallTableKeyTypeInfos)); + smallTableObjectInspectorList = new ArrayList(); + smallTableObjectInspectorList.addAll(smallTableKeyObjectInspectorList); + + final int valueSize = smallTableValueTypeInfos.length; + + // Automatically populate. + smallTableValueColumnNames = new String[valueSize]; + for (int i = 0; i < valueSize; i++) { + smallTableValueColumnNames[i] = HiveConf.getColumnInternalName(keySize + i); } - bigTableObjectInspectorsList = Arrays.asList(bigTableObjectInspectors); - smallTableObjectInspectors = new ObjectInspector[smallTableValueTypeInfos.length]; - smallTablePrimitiveCategories = new PrimitiveCategory[smallTableValueTypeInfos.length]; - smallTableValuePrimitiveTypeInfos = new PrimitiveTypeInfo[smallTableValueTypeInfos.length]; - for (int i = 0; i < smallTableValueTypeInfos.length; i++) { - PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) smallTableValueTypeInfos[i]; - smallTableObjectInspectors[i] = - PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(primitiveTypeInfo); - smallTablePrimitiveCategories[i] = primitiveTypeInfo.getPrimitiveCategory(); - smallTableValuePrimitiveTypeInfos[i] = primitiveTypeInfo; + smallTableValueObjectInspectors = new ObjectInspector[valueSize]; + for (int i = 0; i < valueSize; i++) { + smallTableValueObjectInspectors[i] = + PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector( + (PrimitiveTypeInfo) smallTableValueTypeInfos[i]); } - smallTableObjectInspectorsList = Arrays.asList(smallTableObjectInspectors); + smallTableValueColumnNameList = Arrays.asList(smallTableValueColumnNames); + smallTableTypeInfoList.addAll(Arrays.asList(smallTableValueTypeInfos)); + smallTableValueObjectInspectorList = Arrays.asList(smallTableValueObjectInspectors); + smallTableColumnNameList.addAll(smallTableValueColumnNameList); + smallTableColumnNames = smallTableColumnNameList.toArray(new String[0]); + smallTableTypeInfos = smallTableTypeInfoList.toArray(new TypeInfo[0]); + + smallTableObjectInspectorList.addAll(smallTableValueObjectInspectorList); + + /* + * The inputObjectInspectors describe the keys and values of the Big Table and Small Table. 
+ */ bigTableStandardObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector( - bigTableColumnNamesList, Arrays.asList((ObjectInspector[]) bigTableObjectInspectors)); + bigTableColumnNameList, bigTableObjectInspectorList); smallTableStandardObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector( - smallTableValueColumnNamesList, Arrays.asList((ObjectInspector[]) smallTableObjectInspectors)); + smallTableColumnNameList, smallTableObjectInspectorList); inputObjectInspectors = - new ObjectInspector[] { bigTableStandardObjectInspector, smallTableStandardObjectInspector }; + new ObjectInspector[] { + bigTableStandardObjectInspector, smallTableStandardObjectInspector }; + + // For now, we always retain the Small Table values... + // Automatically populate. + smallTableRetainValueColumnNums = new int[valueSize]; + for (int i = 0; i < valueSize; i++) { + smallTableRetainValueColumnNums[i] = i; + } int outputLength = bigTableRetainColumnNums.length + @@ -203,12 +316,13 @@ public void computeDerived() { outputTypeInfos = new TypeInfo[outputLength]; int outputIndex = 0; - for (int i = 0; i < bigTableRetainColumnNums.length; i++) { + final int bigTableRetainSize = bigTableRetainColumnNums.length; + for (int i = 0; i < bigTableRetainSize; i++) { outputTypeInfos[outputIndex++] = bigTableTypeInfos[bigTableRetainColumnNums[i]]; } - // for (int i = 0; i < smallTableRetainKeyColumnNums.length; i++) { - // outputTypeInfos[outputIndex++] = smallTableTypeInfos[smallTableRetainKeyColumnNums[i]]; - // } + for (int i = 0; i < smallTableRetainKeyColumnNums.length; i++) { + outputTypeInfos[outputIndex++] = smallTableKeyTypeInfos[smallTableRetainKeyColumnNums[i]]; + } for (int i = 0; i < smallTableRetainValueColumnNums.length; i++) { outputTypeInfos[outputIndex++] = smallTableValueTypeInfos[smallTableRetainValueColumnNums[i]]; } @@ -221,13 +335,6 @@ public void computeDerived() { } } - public void trimAwaySmallTableValueInfo() { - smallTableValueColumnNames = new String[] {}; - smallTableValueTypeInfos = new TypeInfo[] {}; - smallTableRetainKeyColumnNums = new int[] {}; - smallTableRetainValueColumnNums = new int[] {}; - } - private String[] createOutputColumnNames(int outputColumnCount) { String[] outputColumnNames = new String[outputColumnCount]; int counter = 1; diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/NoOpExpression.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/NoOpExpression.java new file mode 100644 index 0000000000..47e927d4a5 --- /dev/null +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/NoOpExpression.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.apache.hadoop.hive.ql.exec.vector.mapjoin;
+
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+
+/**
+ * A do-nothing test expression: evaluate() leaves the batch untouched, so the column passes through unchanged.
+ */
+public class NoOpExpression extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  public NoOpExpression() {
+  }
+
+  public NoOpExpression(int colNum) {
+    super(colNum);
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+  }
+
+  @Override
+  public String vectorExpressionParameters() {
+    return "noOpCol" + outputColumnNum + ":" +
+        getTypeName(outputTypeInfo, outputDataTypePhysicalVariation);
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder()).build();
+  }
+}
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/TestMapJoinOperator.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/TestMapJoinOperator.java
index 4c41f9c4f8..a37b5a07fb 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/TestMapJoinOperator.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/TestMapJoinOperator.java
@@ -25,7 +25,6 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
-import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.persistence.MapJoinBytesTableContainer;
 import org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectSerDeContext;
 import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer;
@@ -33,9 +32,7 @@
 import org.apache.hadoop.hive.ql.exec.util.collectoroperator.CollectorTestOperator;
 import org.apache.hadoop.hive.ql.exec.util.collectoroperator.CountCollectorTestOperator;
 import org.apache.hadoop.hive.ql.exec.util.collectoroperator.CountVectorCollectorTestOperator;
-import org.apache.hadoop.hive.ql.exec.util.collectoroperator.RowCollectorTestOperator;
 import org.apache.hadoop.hive.ql.exec.util.collectoroperator.RowCollectorTestOperatorBase;
-import org.apache.hadoop.hive.ql.exec.util.collectoroperator.RowVectorCollectorTestOperator;
 import org.apache.hadoop.hive.ql.exec.util.rowobjects.RowTestObjects;
 import org.apache.hadoop.hive.ql.exec.util.rowobjects.RowTestObjectsMultiSet;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
@@ -48,17 +45,23 @@
 import org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow;
 import org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOuterFilteredOperator;
+import org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource;
 import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizationContextRegion;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
-import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerateStream;
 import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator;
 import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType;
 import
org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType.GenerateCategory; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestConfig.CreateMapJoinResult; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestConfig.MapJoinTestImplementation; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestConfig.TestMultiSetCollectorOperator; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestConfig.TestMultiSetVectorCollectorOperator; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestDescription.MapJoinPlanVariation; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestDescription.SmallTableGenerationParameters; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestDescription.SmallTableGenerationParameters.ValueOption; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.VectorMapJoinFastMultiKeyHashMap; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.VectorMapJoinFastTableContainer; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.VerifyFastRow; @@ -69,7 +72,6 @@ import org.apache.hadoop.hive.ql.plan.JoinCondDesc; import org.apache.hadoop.hive.ql.plan.JoinDesc; import org.apache.hadoop.hive.ql.plan.MapJoinDesc; -import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.PlanUtils; import org.apache.hadoop.hive.ql.plan.TableDesc; import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc; @@ -86,14 +88,13 @@ import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableSerializeWrite; import org.apache.hadoop.hive.serde2.lazybinary.fast.LazyBinarySerializeWrite; -import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; -import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.io.BytesWritable; @@ -101,6 +102,7 @@ import org.apache.hive.common.util.HashCodeUtil; import org.apache.hive.common.util.ReflectionUtil; import org.junit.Test; +import org.junit.Ignore; import java.io.IOException; import java.util.ArrayList; @@ -120,233 +122,1355 @@ public class TestMapJoinOperator { - /* - * This test collector operator is for MapJoin row-mode. - */ - private class TestMultiSetCollectorOperator extends RowCollectorTestOperator { + private boolean addLongHiveConfVariation(int hiveConfVariation, HiveConf hiveConf) { + + // Set defaults. 
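+    // (Variation 0 keeps these defaults; variation 1 turns native MapJoin MINMAX checking
+    // on; variation 2 forces the overflow/repeated-key path by lowering the threshold to 5.
+    // Any higher variation number makes this method return false, which tells the caller
+    // that all variations have been exercised.)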
+ HiveConf.setBoolVar( + hiveConf, + HiveConf.ConfVars.HIVE_VECTORIZATION_MAPJOIN_NATIVE_MINMAX_ENABLED, false); + HiveConf.setIntVar( + hiveConf, + HiveConf.ConfVars.HIVE_VECTORIZATION_MAPJOIN_NATIVE_OVERFLOW_REPEATED_THRESHOLD, -1); + + switch (hiveConfVariation) { + case 0: + break; + case 1: + HiveConf.setBoolVar( + hiveConf, + HiveConf.ConfVars.HIVE_VECTORIZATION_MAPJOIN_NATIVE_MINMAX_ENABLED, true); + break; + case 2: + // Force generateHashMapResultLargeMultiValue to be used. + HiveConf.setIntVar( + hiveConf, + HiveConf.ConfVars.HIVE_VECTORIZATION_MAPJOIN_NATIVE_OVERFLOW_REPEATED_THRESHOLD, 5); + break; + default: + return false; + } + return true; + } + + private boolean goodTestVariation(MapJoinTestDescription testDesc) { + final int smallTableValueSize = testDesc.smallTableRetainValueColumnNums.length; + + switch (testDesc.vectorMapJoinVariation) { + case INNER: + return (smallTableValueSize > 0); + case INNER_BIG_ONLY: + case LEFT_SEMI: + return (smallTableValueSize == 0); + case OUTER: + return true; + case FULL_OUTER: + return true; + default: + throw new RuntimeException( + "Unexpected vectorMapJoinVariation " + testDesc.vectorMapJoinVariation); + } + + } + + @Ignore + @Test + public void testLong0() throws Exception { + long seed = 234882L; + int rowCount = 10; + + int hiveConfVariation = 0; + boolean hiveConfVariationsDone = false; + do { + for (VectorMapJoinVariation vectorMapJoinVariation : VectorMapJoinVariation.values()) { + hiveConfVariationsDone = + doTestLong0( + seed, rowCount, hiveConfVariation, vectorMapJoinVariation, + MapJoinPlanVariation.DYNAMIC_PARTITION_HASH_JOIN); + } + seed++; + hiveConfVariation++; + } while (!hiveConfVariationsDone); + } + + private boolean doTestLong0(long seed, int rowCount, int hiveConfVariation, + VectorMapJoinVariation vectorMapJoinVariation, + MapJoinPlanVariation mapJoinPlanVariation) throws Exception { + + HiveConf hiveConf = new HiveConf(); + + if (!addLongHiveConfVariation(hiveConfVariation, hiveConf)) { + return true; + } + + TypeInfo[] bigTableTypeInfos = null; + + int[] bigTableKeyColumnNums = null; + + TypeInfo[] smallTableValueTypeInfos = null; + + int[] smallTableRetainKeyColumnNums = null; + + SmallTableGenerationParameters smallTableGenerationParameters = + new SmallTableGenerationParameters(); + + MapJoinTestDescription testDesc = null; + MapJoinTestData testData = null; + + // Big Table: long key, no value; Small Table: no key retained, date value + bigTableTypeInfos = + new TypeInfo[] { + TypeInfoFactory.longTypeInfo}; + + bigTableKeyColumnNums = new int[] {0}; + + smallTableRetainKeyColumnNums = new int[] {}; + + smallTableValueTypeInfos = + new TypeInfo[] {TypeInfoFactory.dateTypeInfo}; + + testDesc = + new MapJoinTestDescription( + hiveConf, vectorMapJoinVariation, + bigTableTypeInfos, + bigTableKeyColumnNums, + smallTableValueTypeInfos, + smallTableRetainKeyColumnNums, + smallTableGenerationParameters, + mapJoinPlanVariation); + + if (!goodTestVariation(testDesc)) { + return false; + } + + testData = + new MapJoinTestData(rowCount, testDesc, seed); + + executeTest(testDesc, testData, "testLong0"); + + return false; + } + + @Ignore + @Test + public void testLong0_NoRegularKeys() throws Exception { + long seed = 234882L; + int rowCount = 10; + + int hiveConfVariation = 0; + boolean hiveConfVariationsDone = false; + do { + for (VectorMapJoinVariation vectorMapJoinVariation : VectorMapJoinVariation.values()) { + hiveConfVariationsDone = + doTestLong0_NoRegularKeys( + seed, rowCount, hiveConfVariation, 
vectorMapJoinVariation, + MapJoinPlanVariation.DYNAMIC_PARTITION_HASH_JOIN); + } + seed++; + hiveConfVariation++; + } while (!hiveConfVariationsDone); + } + + private boolean doTestLong0_NoRegularKeys(long seed, int rowCount, int hiveConfVariation, + VectorMapJoinVariation vectorMapJoinVariation, + MapJoinPlanVariation mapJoinPlanVariation) throws Exception { + + HiveConf hiveConf = new HiveConf(); + + if (!addLongHiveConfVariation(hiveConfVariation, hiveConf)) { + return true; + } + + TypeInfo[] bigTableTypeInfos = null; + + int[] bigTableKeyColumnNums = null; + + TypeInfo[] smallTableValueTypeInfos = null; + + int[] smallTableRetainKeyColumnNums = null; + + SmallTableGenerationParameters smallTableGenerationParameters = + new SmallTableGenerationParameters(); + smallTableGenerationParameters.setValueOption(ValueOption.NO_REGULAR_SMALL_KEYS); + + MapJoinTestDescription testDesc = null; + MapJoinTestData testData = null; + + // Big Table: long key, no value; Small Table: no key retained, date value + bigTableTypeInfos = + new TypeInfo[] { + TypeInfoFactory.longTypeInfo}; + + bigTableKeyColumnNums = new int[] {0}; + + smallTableRetainKeyColumnNums = new int[] {}; + + smallTableValueTypeInfos = + new TypeInfo[] {TypeInfoFactory.dateTypeInfo}; + + testDesc = + new MapJoinTestDescription( + hiveConf, vectorMapJoinVariation, + bigTableTypeInfos, + bigTableKeyColumnNums, + smallTableValueTypeInfos, + smallTableRetainKeyColumnNums, + smallTableGenerationParameters, + mapJoinPlanVariation); + + if (!goodTestVariation(testDesc)) { + return false; + } + + testData = + new MapJoinTestData(rowCount, testDesc, seed); + + executeTest(testDesc, testData, "doTestLong0_NoRegularKeys"); + + return false; + } + + @Ignore + @Test + public void testLong1() throws Exception { + long seed = 234882L; + int rowCount = 10; + + int hiveConfVariation = 0; + boolean hiveConfVariationsDone = false; + do { + for (VectorMapJoinVariation vectorMapJoinVariation : VectorMapJoinVariation.values()) { + hiveConfVariationsDone = + doTestLong1( + seed, rowCount, hiveConfVariation, vectorMapJoinVariation, + MapJoinPlanVariation.DYNAMIC_PARTITION_HASH_JOIN); + } + seed++; + hiveConfVariation++; + } while (!hiveConfVariationsDone); + } + + public boolean doTestLong1(long seed, int rowCount, int hiveConfVariation, + VectorMapJoinVariation vectorMapJoinVariation, + MapJoinPlanVariation mapJoinPlanVariation) throws Exception { + + HiveConf hiveConf = new HiveConf(); + + if (!addLongHiveConfVariation(hiveConfVariation, hiveConf)) { + return true; + } + + TypeInfo[] bigTableTypeInfos = null; + + int[] bigTableKeyColumnNums = null; + + TypeInfo[] smallTableValueTypeInfos = null; + + int[] smallTableRetainKeyColumnNums = null; + + SmallTableGenerationParameters smallTableGenerationParameters = + new SmallTableGenerationParameters(); + + MapJoinTestDescription testDesc = null; + MapJoinTestData testData = null; + + // Big Table: int key, long value; Small Table: no key retained, string value + bigTableTypeInfos = + new TypeInfo[] { + TypeInfoFactory.intTypeInfo, + TypeInfoFactory.longTypeInfo}; + + bigTableKeyColumnNums = new int[] {0}; + + smallTableRetainKeyColumnNums = new int[] {}; + + smallTableValueTypeInfos = + new TypeInfo[] {TypeInfoFactory.stringTypeInfo}; + + testDesc = + new MapJoinTestDescription( + hiveConf, vectorMapJoinVariation, + bigTableTypeInfos, + bigTableKeyColumnNums, + smallTableValueTypeInfos, + smallTableRetainKeyColumnNums, + smallTableGenerationParameters, + mapJoinPlanVariation); + + if 
(!goodTestVariation(testDesc)) { + return false; + } + + testData = + new MapJoinTestData(rowCount, testDesc, seed); + + executeTest(testDesc, testData, "testLong1"); + + return false; + } + + @Test + public void testLong2() throws Exception { + long seed = 3553; + int rowCount = 10; + + int hiveConfVariation = 0; + boolean hiveConfVariationsDone = false; + do { + for (VectorMapJoinVariation vectorMapJoinVariation : VectorMapJoinVariation.values()) { + hiveConfVariationsDone = + doTestLong2( + seed, rowCount, hiveConfVariation, vectorMapJoinVariation, + MapJoinPlanVariation.DYNAMIC_PARTITION_HASH_JOIN); + } + seed++; + hiveConfVariation++; + } while (!hiveConfVariationsDone); + } + + public boolean doTestLong2(long seed, int rowCount, int hiveConfVariation, + VectorMapJoinVariation vectorMapJoinVariation, + MapJoinPlanVariation mapJoinPlanVariation) throws Exception { + + HiveConf hiveConf = new HiveConf(); + + if (!addLongHiveConfVariation(hiveConfVariation, hiveConf)) { + return true; + } + + TypeInfo[] bigTableTypeInfos = null; + + int[] bigTableKeyColumnNums = null; + + TypeInfo[] smallTableValueTypeInfos = null; + + int[] smallTableRetainKeyColumnNums = null; + + SmallTableGenerationParameters smallTableGenerationParameters = + new SmallTableGenerationParameters(); + + MapJoinTestDescription testDesc = null; + MapJoinTestData testData = null; + + // Big Table: short key, no value; Small Table: key retained, timestamp value + bigTableTypeInfos = + new TypeInfo[] { + TypeInfoFactory.shortTypeInfo}; + + bigTableKeyColumnNums = new int[] {0}; + + smallTableRetainKeyColumnNums = new int[] {0}; + + smallTableValueTypeInfos = + new TypeInfo[] {TypeInfoFactory.timestampTypeInfo}; + + testDesc = + new MapJoinTestDescription( + hiveConf, vectorMapJoinVariation, + bigTableTypeInfos, + bigTableKeyColumnNums, + smallTableValueTypeInfos, + smallTableRetainKeyColumnNums, + smallTableGenerationParameters, + mapJoinPlanVariation); + + if (!goodTestVariation(testDesc)) { + return false; + } + + testData = + new MapJoinTestData(rowCount, testDesc, seed); + + executeTest(testDesc, testData, "testLong2"); + + return false; + } + + + @Test + public void testLong3() throws Exception { + long seed = 9934; + int rowCount = 10; + + int hiveConfVariation = 0; + boolean hiveConfVariationsDone = false; + do { + for (VectorMapJoinVariation vectorMapJoinVariation : VectorMapJoinVariation.values()) { + hiveConfVariationsDone = + doTestLong3( + seed, rowCount, hiveConfVariation, vectorMapJoinVariation, + MapJoinPlanVariation.DYNAMIC_PARTITION_HASH_JOIN); + } + seed++; + hiveConfVariation++; + } while (!hiveConfVariationsDone); + } + + public boolean doTestLong3(long seed, int rowCount, int hiveConfVariation, + VectorMapJoinVariation vectorMapJoinVariation, + MapJoinPlanVariation mapJoinPlanVariation) throws Exception { + + HiveConf hiveConf = new HiveConf(); + + if (!addLongHiveConfVariation(hiveConfVariation, hiveConf)) { + return true; + } + + TypeInfo[] bigTableTypeInfos = null; + + int[] bigTableKeyColumnNums = null; + + TypeInfo[] smallTableValueTypeInfos = null; + + int[] smallTableRetainKeyColumnNums = null; + + SmallTableGenerationParameters smallTableGenerationParameters = + new SmallTableGenerationParameters(); + + MapJoinTestDescription testDesc = null; + MapJoinTestData testData = null; + + // Big Table: int key, string value; Small Table: key retained, decimal value + bigTableTypeInfos = + new TypeInfo[] { + TypeInfoFactory.intTypeInfo, + TypeInfoFactory.stringTypeInfo}; + + bigTableKeyColumnNums = new 
int[] {0}; + + smallTableRetainKeyColumnNums = new int[] {0}; + + smallTableValueTypeInfos = + new TypeInfo[] { + new DecimalTypeInfo(38, 18)}; + + testDesc = + new MapJoinTestDescription( + hiveConf, vectorMapJoinVariation, + bigTableTypeInfos, + bigTableKeyColumnNums, + smallTableValueTypeInfos, + smallTableRetainKeyColumnNums, + smallTableGenerationParameters, + mapJoinPlanVariation); + + if (!goodTestVariation(testDesc)) { + return false; + } + + testData = + new MapJoinTestData(rowCount, testDesc, seed); + + executeTest(testDesc, testData, "testLong3"); + + return false; + } + + @Test + public void testLong3_NoRegularKeys() throws Exception { + long seed = 9934; + int rowCount = 10; - private final RowTestObjectsMultiSet testRowMultiSet; + int hiveConfVariation = 0; + boolean hiveConfVariationsDone = false; + do { + for (VectorMapJoinVariation vectorMapJoinVariation : VectorMapJoinVariation.values()) { + hiveConfVariationsDone = + doTestLong3_NoRegularKeys( + seed, rowCount, hiveConfVariation, vectorMapJoinVariation, + MapJoinPlanVariation.DYNAMIC_PARTITION_HASH_JOIN); + } + seed++; + hiveConfVariation++; + } while (!hiveConfVariationsDone); + } + + public boolean doTestLong3_NoRegularKeys(long seed, int rowCount, int hiveConfVariation, + VectorMapJoinVariation vectorMapJoinVariation, + MapJoinPlanVariation mapJoinPlanVariation) throws Exception { + + HiveConf hiveConf = new HiveConf(); + + if (!addLongHiveConfVariation(hiveConfVariation, hiveConf)) { + return true; + } + + TypeInfo[] bigTableTypeInfos = null; + + int[] bigTableKeyColumnNums = null; + + TypeInfo[] smallTableValueTypeInfos = null; + + int[] smallTableRetainKeyColumnNums = null; + + SmallTableGenerationParameters smallTableGenerationParameters = + new SmallTableGenerationParameters(); + smallTableGenerationParameters.setValueOption(ValueOption.NO_REGULAR_SMALL_KEYS); + + MapJoinTestDescription testDesc = null; + MapJoinTestData testData = null; + + // Big Table: int key, string value; Small Table: key retained, decimal value + bigTableTypeInfos = + new TypeInfo[] { + TypeInfoFactory.intTypeInfo, + TypeInfoFactory.stringTypeInfo}; + + bigTableKeyColumnNums = new int[] {0}; + + smallTableRetainKeyColumnNums = new int[] {0}; + + smallTableValueTypeInfos = + new TypeInfo[] { + new DecimalTypeInfo(38, 18)}; + + testDesc = + new MapJoinTestDescription( + hiveConf, vectorMapJoinVariation, + bigTableTypeInfos, + bigTableKeyColumnNums, + smallTableValueTypeInfos, + smallTableRetainKeyColumnNums, + smallTableGenerationParameters, + mapJoinPlanVariation); + + if (!goodTestVariation(testDesc)) { + return false; + } + + testData = + new MapJoinTestData(rowCount, testDesc, seed); + + executeTest(testDesc, testData, "doTestLong3_NoRegularKeys"); + + return false; + } + + @Test + public void testLong4() throws Exception { + long seed = 3982; + int rowCount = 10; + + int hiveConfVariation = 0; + boolean hiveConfVariationsDone = false; + do { + for (VectorMapJoinVariation vectorMapJoinVariation : VectorMapJoinVariation.values()) { + hiveConfVariationsDone = + doTestLong4( + seed, rowCount, hiveConfVariation, vectorMapJoinVariation, + MapJoinPlanVariation.DYNAMIC_PARTITION_HASH_JOIN); + } + seed++; + hiveConfVariation++; + } while (!hiveConfVariationsDone); + } + + public boolean doTestLong4(long seed, int rowCount, int hiveConfVariation, + VectorMapJoinVariation vectorMapJoinVariation, + MapJoinPlanVariation mapJoinPlanVariation) throws Exception { + + HiveConf hiveConf = new HiveConf(); + + if 
(!addLongHiveConfVariation(hiveConfVariation, hiveConf)) { + return true; + } + + TypeInfo[] bigTableTypeInfos = null; + + int[] bigTableKeyColumnNums = null; + + TypeInfo[] smallTableValueTypeInfos = null; + + int[] smallTableRetainKeyColumnNums = null; + + SmallTableGenerationParameters smallTableGenerationParameters = + new SmallTableGenerationParameters(); + + MapJoinTestDescription testDesc = null; + MapJoinTestData testData = null; + + // Big Table: int key, no value; Small Table: no key retained, no value + // (exercise INNER_BIGONLY, LEFT_SEMI) + bigTableTypeInfos = + new TypeInfo[] { + TypeInfoFactory.intTypeInfo}; + + bigTableKeyColumnNums = new int[] {0}; + + smallTableRetainKeyColumnNums = new int[] {}; + + smallTableValueTypeInfos = new TypeInfo[] {}; + + testDesc = + new MapJoinTestDescription( + hiveConf, vectorMapJoinVariation, + bigTableTypeInfos, + bigTableKeyColumnNums, + smallTableValueTypeInfos, + smallTableRetainKeyColumnNums, + smallTableGenerationParameters, + mapJoinPlanVariation); + + if (!goodTestVariation(testDesc)) { + return false; + } + + testData = + new MapJoinTestData(rowCount, testDesc, seed); + + executeTest(testDesc, testData, "testLong4"); + + return false; + } + + @Test + public void testLong5() throws Exception { + long seed = 3553; + int rowCount = 10; + + int hiveConfVariation = 0; + boolean hiveConfVariationsDone = false; + do { + for (VectorMapJoinVariation vectorMapJoinVariation : VectorMapJoinVariation.values()) { + hiveConfVariationsDone = + doTestLong5( + seed, rowCount, hiveConfVariation, vectorMapJoinVariation, + MapJoinPlanVariation.DYNAMIC_PARTITION_HASH_JOIN); + } + seed++; + hiveConfVariation++; + } while (!hiveConfVariationsDone); + } + + public boolean doTestLong5(long seed, int rowCount, int hiveConfVariation, + VectorMapJoinVariation vectorMapJoinVariation, + MapJoinPlanVariation mapJoinPlanVariation) throws Exception { + + HiveConf hiveConf = new HiveConf(); + + if (!addLongHiveConfVariation(hiveConfVariation, hiveConf)) { + return true; + } + + TypeInfo[] bigTableTypeInfos = null; + + int[] bigTableKeyColumnNums = null; + + TypeInfo[] smallTableValueTypeInfos = null; + + int[] smallTableRetainKeyColumnNums = null; + + // Cause there to be no regular FULL OUTER MapJoin MATCHes so only non-match Small Table + // results. 
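+ // (Default SmallTableGenerationParameters are used here; actually suppressing regular key matches requires setValueOption(ValueOption.NO_REGULAR_SMALL_KEYS), as in the *_NoRegularKeys tests above.)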
+ SmallTableGenerationParameters smallTableGenerationParameters = + new SmallTableGenerationParameters(); + + MapJoinTestDescription testDesc = null; + MapJoinTestData testData = null; + + // Big Table: long key, no value; Small Table: key retained, no value + // (exercise INNER_BIGONLY, LEFT_SEMI) + bigTableTypeInfos = + new TypeInfo[] { + TypeInfoFactory.longTypeInfo}; + + bigTableKeyColumnNums = new int[] {0}; + + smallTableRetainKeyColumnNums = new int[] {0}; + + smallTableValueTypeInfos = new TypeInfo[] {}; + + testDesc = + new MapJoinTestDescription( + hiveConf, vectorMapJoinVariation, + bigTableTypeInfos, + bigTableKeyColumnNums, + smallTableValueTypeInfos, + smallTableRetainKeyColumnNums, + smallTableGenerationParameters, + mapJoinPlanVariation); + + if (!goodTestVariation(testDesc)) { + return false; + } + + testData = + new MapJoinTestData(rowCount, testDesc, seed); + + executeTest(testDesc, testData, "testLong5"); + + return false; + } + + @Test + public void testLong6() throws Exception { + long seed = 9384; + int rowCount = 10; + + int hiveConfVariation = 0; + boolean hiveConfVariationsDone = false; + do { + for (VectorMapJoinVariation vectorMapJoinVariation : VectorMapJoinVariation.values()) { + hiveConfVariationsDone = + doTestLong6( + seed, rowCount, hiveConfVariation, vectorMapJoinVariation, + MapJoinPlanVariation.DYNAMIC_PARTITION_HASH_JOIN); + } + seed++; + hiveConfVariation++; + } while (!hiveConfVariationsDone); + } + + public boolean doTestLong6(long seed, int rowCount, int hiveConfVariation, + VectorMapJoinVariation vectorMapJoinVariation, + MapJoinPlanVariation mapJoinPlanVariation) throws Exception { + + HiveConf hiveConf = new HiveConf(); + + if (!addLongHiveConfVariation(hiveConfVariation, hiveConf)) { + return true; + } + + TypeInfo[] bigTableTypeInfos = null; + + int[] bigTableKeyColumnNums = null; + + TypeInfo[] smallTableValueTypeInfos = null; + + int[] smallTableRetainKeyColumnNums = null; + + // Cause there to be no regular FULL OUTER MapJoin MATCHes so only non-match Small Table + // results. + SmallTableGenerationParameters smallTableGenerationParameters = + new SmallTableGenerationParameters(); + + MapJoinTestDescription testDesc = null; + MapJoinTestData testData = null; + + // Big Table: long key, timestamp value; Small Table: key retained, no value + // (exercise INNER_BIGONLY, LEFT_SEMI) + bigTableTypeInfos = + new TypeInfo[] { + TypeInfoFactory.longTypeInfo, + TypeInfoFactory.timestampTypeInfo}; + + bigTableKeyColumnNums = new int[] {0}; + + smallTableRetainKeyColumnNums = new int[] {0}; + + smallTableValueTypeInfos = new TypeInfo[] {}; + + testDesc = + new MapJoinTestDescription( + hiveConf, vectorMapJoinVariation, + bigTableTypeInfos, + bigTableKeyColumnNums, + smallTableValueTypeInfos, + smallTableRetainKeyColumnNums, + smallTableGenerationParameters, + mapJoinPlanVariation); + + if (!goodTestVariation(testDesc)) { + return false; + } + + testData = + new MapJoinTestData(rowCount, testDesc, seed); + + executeTest(testDesc, testData, "testLong6"); + + return false; + } + + private boolean addNonLongHiveConfVariation(int hiveConfVariation, HiveConf hiveConf) { + + // Set defaults. + HiveConf.setIntVar( + hiveConf, + HiveConf.ConfVars.HIVE_VECTORIZATION_MAPJOIN_NATIVE_OVERFLOW_REPEATED_THRESHOLD, -1); + + switch (hiveConfVariation) { + case 0: + break; + case 1: + // Force generateHashMapResultLargeMultiValue to be used. 
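+ // A threshold as low as 5 lets even small duplicate-key value counts reach the overflow-repeated code path.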
+ HiveConf.setIntVar( + hiveConf, + HiveConf.ConfVars.HIVE_VECTORIZATION_MAPJOIN_NATIVE_OVERFLOW_REPEATED_THRESHOLD, 5); + break; + default: + return false; + } + return true; + } + + @Test + public void testMultiKey0() throws Exception { + long seed = 28322; + + int hiveConfVariation = 0; + boolean hiveConfVariationsDone = false; + do { + for (VectorMapJoinVariation vectorMapJoinVariation : VectorMapJoinVariation.values()) { + hiveConfVariationsDone = + doTestMultiKey0( + seed, hiveConfVariation, vectorMapJoinVariation, + MapJoinPlanVariation.DYNAMIC_PARTITION_HASH_JOIN); + } + seed++; + hiveConfVariation++; + } while (!hiveConfVariationsDone); + } + + public boolean doTestMultiKey0(long seed, int hiveConfVariation, VectorMapJoinVariation vectorMapJoinVariation, + MapJoinPlanVariation mapJoinPlanVariation) throws Exception { + + int rowCount = 10; + + HiveConf hiveConf = new HiveConf(); + + if (!addNonLongHiveConfVariation(hiveConfVariation, hiveConf)) { + return true; + } + + TypeInfo[] bigTableTypeInfos = null; + + int[] bigTableKeyColumnNums = null; + + TypeInfo[] smallTableValueTypeInfos = null; + + int[] smallTableRetainKeyColumnNums = null; + + SmallTableGenerationParameters smallTableGenerationParameters = + new SmallTableGenerationParameters(); + + MapJoinTestDescription testDesc = null; + MapJoinTestData testData = null; + + // Two key columns. + bigTableTypeInfos = + new TypeInfo[] { + TypeInfoFactory.shortTypeInfo, + TypeInfoFactory.intTypeInfo}; + bigTableKeyColumnNums = new int[] {0, 1}; + + smallTableRetainKeyColumnNums = new int[] {0, 1}; + + smallTableValueTypeInfos = new TypeInfo[] {}; + + //---------------------------------------------------------------------------------------------- + + testDesc = + new MapJoinTestDescription( + hiveConf, vectorMapJoinVariation, + bigTableTypeInfos, + bigTableKeyColumnNums, + smallTableValueTypeInfos, + smallTableRetainKeyColumnNums, + smallTableGenerationParameters, + mapJoinPlanVariation); + + if (!goodTestVariation(testDesc)) { + return false; + } + + // Prepare data. Good for ANY implementation variation.
+ testData = + new MapJoinTestData(rowCount, testDesc, seed); + + executeTest(testDesc, testData, "testMultiKey0"); + + return false; + } + + @Test + public void testMultiKey1() throws Exception { + long seed = 87543; + + int hiveConfVariation = 0; + boolean hiveConfVariationsDone = false; + do { + for (VectorMapJoinVariation vectorMapJoinVariation : VectorMapJoinVariation.values()) { + hiveConfVariationsDone = + doTestMultiKey1( + seed, hiveConfVariation, vectorMapJoinVariation, + MapJoinPlanVariation.DYNAMIC_PARTITION_HASH_JOIN); + } + seed++; + hiveConfVariation++; + } while (!hiveConfVariationsDone); + } + + public boolean doTestMultiKey1(long seed, int hiveConfVariation, VectorMapJoinVariation vectorMapJoinVariation, + MapJoinPlanVariation mapJoinPlanVariation) throws Exception { + + int rowCount = 10; + + HiveConf hiveConf = new HiveConf(); - public TestMultiSetCollectorOperator( - ObjectInspector[] outputObjectInspectors, - RowTestObjectsMultiSet testRowMultiSet) { - super(outputObjectInspectors); - this.testRowMultiSet = testRowMultiSet; + if (!addNonLongHiveConfVariation(hiveConfVariation, hiveConf)) { + return true; } - public RowTestObjectsMultiSet getTestRowMultiSet() { - return testRowMultiSet; - } + TypeInfo[] bigTableTypeInfos = null; - public void nextTestRow(RowTestObjects testRow) { - testRowMultiSet.add(testRow); - } + int[] bigTableKeyColumnNums = null; + + TypeInfo[] smallTableValueTypeInfos = null; + + int[] smallTableRetainKeyColumnNums = null; + + SmallTableGenerationParameters smallTableGenerationParameters = + new SmallTableGenerationParameters(); + + MapJoinTestDescription testDesc = null; + MapJoinTestData testData = null; - @Override - public String getName() { - return TestMultiSetCollectorOperator.class.getSimpleName(); + // Three key columns. + bigTableTypeInfos = + new TypeInfo[] { + TypeInfoFactory.timestampTypeInfo, + TypeInfoFactory.shortTypeInfo, + TypeInfoFactory.stringTypeInfo}; + bigTableKeyColumnNums = new int[] {0, 1, 2}; + + smallTableRetainKeyColumnNums = new int[] {0, 1, 2}; + + smallTableValueTypeInfos = + new TypeInfo[] {new DecimalTypeInfo(38, 18)}; + + //---------------------------------------------------------------------------------------------- + + testDesc = + new MapJoinTestDescription( + hiveConf, vectorMapJoinVariation, + bigTableTypeInfos, + bigTableKeyColumnNums, + smallTableValueTypeInfos, + smallTableRetainKeyColumnNums, + smallTableGenerationParameters, + mapJoinPlanVariation); + + if (!goodTestVariation(testDesc)) { + return false; } + + // Prepare data. Good for ANY implementation variation. 
+ testData = + new MapJoinTestData(rowCount, testDesc, seed); + + executeTest(testDesc, testData, "testMultiKey1"); + + return false; + } + + @Test + public void testMultiKey2() throws Exception { + long seed = 87543; + + int hiveConfVariation = 0; + boolean hiveConfVariationsDone = false; + do { + for (VectorMapJoinVariation vectorMapJoinVariation : VectorMapJoinVariation.values()) { + hiveConfVariationsDone = + doTestMultiKey2( + seed, hiveConfVariation, vectorMapJoinVariation, + MapJoinPlanVariation.DYNAMIC_PARTITION_HASH_JOIN); + } + seed++; + hiveConfVariation++; + } while (!hiveConfVariationsDone); } - private class TestMultiSetVectorCollectorOperator extends RowVectorCollectorTestOperator { + public boolean doTestMultiKey2(long seed, int hiveConfVariation, VectorMapJoinVariation vectorMapJoinVariation, + MapJoinPlanVariation mapJoinPlanVariation) throws Exception { - private final RowTestObjectsMultiSet testRowMultiSet; + int rowCount = 10; + + HiveConf hiveConf = new HiveConf(); - public RowTestObjectsMultiSet getTestRowMultiSet() { - return testRowMultiSet; + if (!addNonLongHiveConfVariation(hiveConfVariation, hiveConf)) { + return true; } - public TestMultiSetVectorCollectorOperator(TypeInfo[] outputTypeInfos, - ObjectInspector[] outputObjectInspectors, RowTestObjectsMultiSet testRowMultiSet) - throws HiveException { - super(outputTypeInfos, outputObjectInspectors); - this.testRowMultiSet = testRowMultiSet; - } + TypeInfo[] bigTableTypeInfos = null; - public void nextTestRow(RowTestObjects testRow) { - testRowMultiSet.add(testRow); - } + int[] bigTableKeyColumnNums = null; - @Override - public String getName() { - return TestMultiSetVectorCollectorOperator.class.getSimpleName(); - } - } + TypeInfo[] smallTableValueTypeInfos = null; + + int[] smallTableRetainKeyColumnNums = null; - private static class KeyConfig { - long seed; - PrimitiveTypeInfo primitiveTypeInfo; - KeyConfig(long seed, PrimitiveTypeInfo primitiveTypeInfo) { - this.seed = seed; - this.primitiveTypeInfo = primitiveTypeInfo; + SmallTableGenerationParameters smallTableGenerationParameters = + new SmallTableGenerationParameters(); + + MapJoinTestDescription testDesc = null; + MapJoinTestData testData = null; + + // Three key columns. + bigTableTypeInfos = + new TypeInfo[] { + TypeInfoFactory.longTypeInfo, + TypeInfoFactory.shortTypeInfo, + TypeInfoFactory.stringTypeInfo}; + bigTableKeyColumnNums = new int[] {0, 1, 2}; + + smallTableRetainKeyColumnNums = new int[] {0, 1, 2}; + + smallTableValueTypeInfos = + new TypeInfo[] { + TypeInfoFactory.stringTypeInfo}; + + //---------------------------------------------------------------------------------------------- + + testDesc = + new MapJoinTestDescription( + hiveConf, vectorMapJoinVariation, + bigTableTypeInfos, + bigTableKeyColumnNums, + smallTableValueTypeInfos, + smallTableRetainKeyColumnNums, + smallTableGenerationParameters, + mapJoinPlanVariation); + + if (!goodTestVariation(testDesc)) { + return false; } + + // Prepare data. Good for ANY implementation variation. 
+ testData = + new MapJoinTestData(rowCount, testDesc, seed); + + executeTest(testDesc, testData, "testMultiKey2"); + + return false; } - private static KeyConfig[] longKeyConfigs = new KeyConfig[] { - new KeyConfig(234882L, TypeInfoFactory.longTypeInfo), - new KeyConfig(4600L, TypeInfoFactory.intTypeInfo), - new KeyConfig(98743L, TypeInfoFactory.shortTypeInfo)}; @Test - public void testLong() throws Exception { - for (KeyConfig longKeyConfig : longKeyConfigs) { + public void testMultiKey3() throws Exception { + long seed = 87543; + + int hiveConfVariation = 0; + boolean hiveConfVariationsDone = false; + do { for (VectorMapJoinVariation vectorMapJoinVariation : VectorMapJoinVariation.values()) { - if (vectorMapJoinVariation == VectorMapJoinVariation.NONE){ - continue; - } - doTestLong(longKeyConfig.seed, longKeyConfig.primitiveTypeInfo, vectorMapJoinVariation); + hiveConfVariationsDone = + doTestMultiKey3( + seed, hiveConfVariation, vectorMapJoinVariation, + MapJoinPlanVariation.DYNAMIC_PARTITION_HASH_JOIN); } - } + seed++; + hiveConfVariation++; + } while (!hiveConfVariationsDone); } - public void doTestLong(long seed, TypeInfo numberTypeInfo, - VectorMapJoinVariation vectorMapJoinVariation) throws Exception { + public boolean doTestMultiKey3(long seed, int hiveConfVariation, VectorMapJoinVariation vectorMapJoinVariation, + MapJoinPlanVariation mapJoinPlanVariation) throws Exception { - int rowCount = 10000; + int rowCount = 10; HiveConf hiveConf = new HiveConf(); - String[] bigTableColumnNames = new String[] {"number1"}; - TypeInfo[] bigTableTypeInfos = - new TypeInfo[] { - TypeInfoFactory.longTypeInfo}; - int[] bigTableKeyColumnNums = new int[] {0}; + if (!addNonLongHiveConfVariation(hiveConfVariation, hiveConf)) { + return true; + } + + TypeInfo[] bigTableTypeInfos = null; - String[] smallTableValueColumnNames = new String[] {"sv1", "sv2"}; - TypeInfo[] smallTableValueTypeInfos = - new TypeInfo[] {TypeInfoFactory.dateTypeInfo, TypeInfoFactory.stringTypeInfo}; + int[] bigTableKeyColumnNums = null; - int[] bigTableRetainColumnNums = new int[] {0}; + TypeInfo[] smallTableValueTypeInfos = null; - int[] smallTableRetainKeyColumnNums = new int[] {}; - int[] smallTableRetainValueColumnNums = new int[] {0, 1}; + int[] smallTableRetainKeyColumnNums = null; SmallTableGenerationParameters smallTableGenerationParameters = new SmallTableGenerationParameters(); + MapJoinTestDescription testDesc = null; + MapJoinTestData testData = null; + + // Two key columns. + bigTableTypeInfos = + new TypeInfo[] { + TypeInfoFactory.dateTypeInfo, + TypeInfoFactory.byteTypeInfo}; + bigTableKeyColumnNums = new int[] {0, 1}; + + smallTableRetainKeyColumnNums = new int[] {0, 1}; + + smallTableValueTypeInfos = + new TypeInfo[] {}; + //---------------------------------------------------------------------------------------------- - MapJoinTestDescription testDesc = new MapJoinTestDescription( - hiveConf, vectorMapJoinVariation, - bigTableColumnNames, bigTableTypeInfos, - bigTableKeyColumnNums, - smallTableValueColumnNames, smallTableValueTypeInfos, - bigTableRetainColumnNums, - smallTableRetainKeyColumnNums, smallTableRetainValueColumnNums, - smallTableGenerationParameters); + testDesc = + new MapJoinTestDescription( + hiveConf, vectorMapJoinVariation, + bigTableTypeInfos, + bigTableKeyColumnNums, + smallTableValueTypeInfos, + smallTableRetainKeyColumnNums, + smallTableGenerationParameters, + mapJoinPlanVariation); + + if (!goodTestVariation(testDesc)) { + return false; + } // Prepare data.
Good for ANY implementation variation. - MapJoinTestData testData = - new MapJoinTestData(rowCount, testDesc, seed, seed * 10); + testData = + new MapJoinTestData(rowCount, testDesc, seed); - executeTest(testDesc, testData); + executeTest(testDesc, testData, "testMultiKey3"); + + return false; } @Test - public void testMultiKey() throws Exception { + public void testString0() throws Exception { long seed = 87543; - for (VectorMapJoinVariation vectorMapJoinVariation : VectorMapJoinVariation.values()) { - if (vectorMapJoinVariation == VectorMapJoinVariation.NONE){ - continue; + + int hiveConfVariation = 0; + boolean hiveConfVariationsDone = false; + do { + for (VectorMapJoinVariation vectorMapJoinVariation : VectorMapJoinVariation.values()) { + hiveConfVariationsDone = + doTestString0( + seed, hiveConfVariation, vectorMapJoinVariation, + MapJoinPlanVariation.DYNAMIC_PARTITION_HASH_JOIN); } - doTestMultiKey(seed, vectorMapJoinVariation); - } + seed++; + hiveConfVariation++; + } while (!hiveConfVariationsDone); } - public void doTestMultiKey(long seed, VectorMapJoinVariation vectorMapJoinVariation) throws Exception { + public boolean doTestString0(long seed, int hiveConfVariation, + VectorMapJoinVariation vectorMapJoinVariation, + MapJoinPlanVariation mapJoinPlanVariation) throws Exception { - int rowCount = 10000; + int rowCount = 10; HiveConf hiveConf = new HiveConf(); - String[] bigTableColumnNames = new String[] {"b1", "b2", "b3"}; - TypeInfo[] bigTableTypeInfos = + if (!addNonLongHiveConfVariation(hiveConfVariation, hiveConf)) { + return true; + } + + TypeInfo[] bigTableTypeInfos = null; + + int[] bigTableKeyColumnNums = null; + + TypeInfo[] smallTableValueTypeInfos = null; + + int[] smallTableRetainKeyColumnNums = null; + + SmallTableGenerationParameters smallTableGenerationParameters = + new SmallTableGenerationParameters(); + + MapJoinTestDescription testDesc = null; + MapJoinTestData testData = null; + + // One plain STRING key column. + bigTableTypeInfos = new TypeInfo[] { - TypeInfoFactory.intTypeInfo, - TypeInfoFactory.longTypeInfo, TypeInfoFactory.stringTypeInfo}; - int[] bigTableKeyColumnNums = new int[] {0, 1, 2}; + bigTableKeyColumnNums = new int[] {0}; - String[] smallTableValueColumnNames = new String[] {"sv1"}; - TypeInfo[] smallTableValueTypeInfos = - new TypeInfo[] {TypeInfoFactory.stringTypeInfo}; + smallTableRetainKeyColumnNums = new int[] {0}; + + smallTableValueTypeInfos = + new TypeInfo[] {TypeInfoFactory.dateTypeInfo, TypeInfoFactory.timestampTypeInfo}; + + //---------------------------------------------------------------------------------------------- + + testDesc = + new MapJoinTestDescription( + hiveConf, vectorMapJoinVariation, + bigTableTypeInfos, + bigTableKeyColumnNums, + smallTableValueTypeInfos, + smallTableRetainKeyColumnNums, + smallTableGenerationParameters, + mapJoinPlanVariation); + + if (!goodTestVariation(testDesc)) { + return false; + } + + // Prepare data. Good for ANY implementation variation. 
+ testData = + new MapJoinTestData(rowCount, testDesc, seed); + + executeTest(testDesc, testData, "testString0"); + + return false; + } + + @Test + public void testString1() throws Exception { + long seed = 3422; + + int hiveConfVariation = 0; + boolean hiveConfVariationsDone = false; + do { + for (VectorMapJoinVariation vectorMapJoinVariation : VectorMapJoinVariation.values()) { + hiveConfVariationsDone = + doTestString1( + seed, hiveConfVariation, vectorMapJoinVariation, + MapJoinPlanVariation.DYNAMIC_PARTITION_HASH_JOIN); + } + seed++; + hiveConfVariation++; + } while (!hiveConfVariationsDone); + } + + public boolean doTestString1(long seed, int hiveConfVariation, + VectorMapJoinVariation vectorMapJoinVariation, + MapJoinPlanVariation mapJoinPlanVariation) throws Exception { + + int rowCount = 10; + + HiveConf hiveConf = new HiveConf(); + + if (!addNonLongHiveConfVariation(hiveConfVariation, hiveConf)) { + return true; + } + + TypeInfo[] bigTableTypeInfos = null; - int[] bigTableRetainColumnNums = new int[] {0, 1, 2}; + int[] bigTableKeyColumnNums = null; - int[] smallTableRetainKeyColumnNums = new int[] {}; - int[] smallTableRetainValueColumnNums = new int[] {0}; + TypeInfo[] smallTableValueTypeInfos = null; + + int[] smallTableRetainKeyColumnNums = null; SmallTableGenerationParameters smallTableGenerationParameters = new SmallTableGenerationParameters(); + MapJoinTestDescription testDesc = null; + MapJoinTestData testData = null; + + // One BINARY key column. + bigTableTypeInfos = + new TypeInfo[] { + TypeInfoFactory.binaryTypeInfo}; + bigTableKeyColumnNums = new int[] {0}; + + smallTableRetainKeyColumnNums = new int[] {0}; + + smallTableValueTypeInfos = + new TypeInfo[] { + TypeInfoFactory.shortTypeInfo, + TypeInfoFactory.floatTypeInfo, + new DecimalTypeInfo(38, 18)}; + + smallTableGenerationParameters = + new SmallTableGenerationParameters(); + //---------------------------------------------------------------------------------------------- - MapJoinTestDescription testDesc = new MapJoinTestDescription( - hiveConf, vectorMapJoinVariation, - bigTableColumnNames, bigTableTypeInfos, - bigTableKeyColumnNums, - smallTableValueColumnNames, smallTableValueTypeInfos, - bigTableRetainColumnNums, - smallTableRetainKeyColumnNums, smallTableRetainValueColumnNums, - smallTableGenerationParameters); + testDesc = + new MapJoinTestDescription( + hiveConf, vectorMapJoinVariation, + bigTableTypeInfos, + bigTableKeyColumnNums, + smallTableValueTypeInfos, + smallTableRetainKeyColumnNums, + smallTableGenerationParameters, + mapJoinPlanVariation); + + if (!goodTestVariation(testDesc)) { + return false; + } // Prepare data. Good for ANY implementation variation. 
- MapJoinTestData testData = - new MapJoinTestData(rowCount, testDesc, seed, seed * 10); + testData = + new MapJoinTestData(rowCount, testDesc, seed); - executeTest(testDesc, testData); + executeTest(testDesc, testData, "testString1"); + + return false; } @Test - public void testString() throws Exception { - long seed = 87543; - for (VectorMapJoinVariation vectorMapJoinVariation : VectorMapJoinVariation.values()) { - if (vectorMapJoinVariation == VectorMapJoinVariation.NONE){ - continue; + public void testString2() throws Exception { + long seed = 7439; + + int hiveConfVariation = 0; + boolean hiveConfVariationsDone = false; + do { + for (VectorMapJoinVariation vectorMapJoinVariation : VectorMapJoinVariation.values()) { + hiveConfVariationsDone = + doTestString2( + seed, hiveConfVariation, vectorMapJoinVariation, + MapJoinPlanVariation.DYNAMIC_PARTITION_HASH_JOIN); } - doTestString(seed, vectorMapJoinVariation); - } + seed++; + hiveConfVariation++; + } while (!hiveConfVariationsDone); } - public void doTestString(long seed, VectorMapJoinVariation vectorMapJoinVariation) throws Exception { + public boolean doTestString2(long seed, int hiveConfVariation, + VectorMapJoinVariation vectorMapJoinVariation, + MapJoinPlanVariation mapJoinPlanVariation) throws Exception { - int rowCount = 10000; + int rowCount = 10; HiveConf hiveConf = new HiveConf(); - String[] bigTableColumnNames = new String[] {"b1"}; - TypeInfo[] bigTableTypeInfos = - new TypeInfo[] { - TypeInfoFactory.stringTypeInfo}; - int[] bigTableKeyColumnNums = new int[] {0}; + if (!addNonLongHiveConfVariation(hiveConfVariation, hiveConf)) { + return true; + } - String[] smallTableValueColumnNames = new String[] {"sv1", "sv2"}; - TypeInfo[] smallTableValueTypeInfos = - new TypeInfo[] {TypeInfoFactory.dateTypeInfo, TypeInfoFactory.timestampTypeInfo}; + TypeInfo[] bigTableTypeInfos = null; - int[] bigTableRetainColumnNums = new int[] {0}; + int[] bigTableKeyColumnNums = null; - int[] smallTableRetainKeyColumnNums = new int[] {}; - int[] smallTableRetainValueColumnNums = new int[] {0, 1}; + TypeInfo[] smallTableValueTypeInfos = null; + + int[] smallTableRetainKeyColumnNums = null; SmallTableGenerationParameters smallTableGenerationParameters = new SmallTableGenerationParameters(); + MapJoinTestDescription testDesc = null; + MapJoinTestData testData = null; + + // One STRING key column; Small Table value: NONE (tests INNER_BIG_ONLY, LEFT_SEMI). + bigTableTypeInfos = + new TypeInfo[] { + TypeInfoFactory.stringTypeInfo}; + bigTableKeyColumnNums = new int[] {0}; + + smallTableRetainKeyColumnNums = new int[] {0}; + + smallTableValueTypeInfos = new TypeInfo[] {}; + + smallTableGenerationParameters = + new SmallTableGenerationParameters(); + //---------------------------------------------------------------------------------------------- - MapJoinTestDescription testDesc = new MapJoinTestDescription( - hiveConf, vectorMapJoinVariation, - bigTableColumnNames, bigTableTypeInfos, - bigTableKeyColumnNums, - smallTableValueColumnNames, smallTableValueTypeInfos, - bigTableRetainColumnNums, - smallTableRetainKeyColumnNums, smallTableRetainValueColumnNums, - smallTableGenerationParameters); + testDesc = + new MapJoinTestDescription( + hiveConf, vectorMapJoinVariation, + bigTableTypeInfos, + bigTableKeyColumnNums, + smallTableValueTypeInfos, + smallTableRetainKeyColumnNums, + smallTableGenerationParameters, + mapJoinPlanVariation); + + if (!goodTestVariation(testDesc)) { + return false; + } // Prepare data. Good for ANY implementation variation. 
- MapJoinTestData testData = - new MapJoinTestData(rowCount, testDesc, seed, seed * 10); + testData = + new MapJoinTestData(rowCount, testDesc, seed); + + executeTest(testDesc, testData, "testString2"); - executeTest(testDesc, testData); + return false; } private void addBigTableRetained(MapJoinTestDescription testDesc, Object[] bigTableRowObjects, @@ -357,14 +1481,32 @@ private void addBigTableRetained(MapJoinTestDescription testDesc, Object[] bigTa } } - private void addToOutput(MapJoinTestDescription testDesc, RowTestObjectsMultiSet expectedTestRowMultiSet, - Object[] outputObjects) { + private void addToOutput(MapJoinTestDescription testDesc, + RowTestObjectsMultiSet expectedTestRowMultiSet, Object[] outputObjects, + RowTestObjectsMultiSet.RowFlag rowFlag) { for (int c = 0; c < outputObjects.length; c++) { - PrimitiveObjectInspector primitiveObjInsp = ((PrimitiveObjectInspector) testDesc.outputObjectInspectors[c]); + PrimitiveObjectInspector primitiveObjInsp = + ((PrimitiveObjectInspector) testDesc.outputObjectInspectors[c]); Object outputObject = outputObjects[c]; outputObjects[c] = primitiveObjInsp.copyObject(outputObject); } - expectedTestRowMultiSet.add(new RowTestObjects(outputObjects)); + expectedTestRowMultiSet.add(new RowTestObjects(outputObjects), rowFlag); + } + + private String rowToCsvString(Object[] rowObjects) { + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < rowObjects.length; i++) { + if (sb.length() > 0) { + sb.append(","); + } + Object obj = rowObjects[i]; + if (obj == null) { + sb.append("\\N"); + } else { + sb.append(obj); + } + } + return sb.toString(); } /* @@ -377,7 +1519,7 @@ private RowTestObjectsMultiSet createExpectedTestRowMultiSet(MapJoinTestDescript RowTestObjectsMultiSet expectedTestRowMultiSet = new RowTestObjectsMultiSet(); VectorExtractRow vectorExtractRow = new VectorExtractRow(); - vectorExtractRow.init(testDesc.bigTableKeyTypeInfos); + vectorExtractRow.init(testDesc.bigTableTypeInfos); final int bigTableColumnCount = testDesc.bigTableTypeInfos.length; Object[] bigTableRowObjects = new Object[bigTableColumnCount]; @@ -385,32 +1527,36 @@ private RowTestObjectsMultiSet createExpectedTestRowMultiSet(MapJoinTestDescript final int bigTableKeyColumnCount = testDesc.bigTableKeyTypeInfos.length; Object[] bigTableKeyObjects = new Object[bigTableKeyColumnCount]; - VectorBatchGenerateStream bigTableBatchStream = testData.getBigTableBatchStream(); + VectorRandomBatchSource bigTableBatchSource = testData.getBigTableBatchSource(); VectorizedRowBatch batch = testData.getBigTableBatch(); - bigTableBatchStream.reset(); - while (bigTableBatchStream.isNext()) { - batch.reset(); - bigTableBatchStream.fillNext(batch); + bigTableBatchSource.resetBatchIteration(); + while (bigTableBatchSource.fillNextBatch(batch)) { final int size = testData.bigTableBatch.size; for (int r = 0; r < size; r++) { vectorExtractRow.extractRow(testData.bigTableBatch, r, bigTableRowObjects); // Form key object array + boolean hasAnyNulls = false; // NULLs may be present in {FULL|LEFT|RIGHT} OUTER joins. 
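+ // Under SQL join semantics a NULL key component never matches, so rows with any NULL key are routed to the non-match handling below.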
for (int k = 0; k < bigTableKeyColumnCount; k++) { int keyColumnNum = testDesc.bigTableKeyColumnNums[k]; - bigTableKeyObjects[k] = bigTableRowObjects[keyColumnNum]; + Object keyObject = bigTableRowObjects[keyColumnNum]; + if (keyObject == null) { + hasAnyNulls = true; + } + bigTableKeyObjects[k] = keyObject; bigTableKeyObjects[k] = ((PrimitiveObjectInspector) testDesc.bigTableObjectInspectors[keyColumnNum]).copyObject(bigTableKeyObjects[k]); } RowTestObjects testKey = new RowTestObjects(bigTableKeyObjects); - if (testData.smallTableKeyHashMap.containsKey(testKey)) { + if (testData.smallTableKeyHashMap.containsKey(testKey) && !hasAnyNulls) { int smallTableKeyIndex = testData.smallTableKeyHashMap.get(testKey); switch (testDesc.vectorMapJoinVariation) { case INNER: case OUTER: + case FULL_OUTER: { // One row per value. ArrayList valueList = testData.smallTableValues.get(smallTableKeyIndex); @@ -420,36 +1566,46 @@ private RowTestObjectsMultiSet createExpectedTestRowMultiSet(MapJoinTestDescript addBigTableRetained(testDesc, bigTableRowObjects, outputObjects); + int outputColumnNum = testDesc.bigTableRetainColumnNums.length; + + final int smallTableRetainKeyColumnNumsLength = + testDesc.smallTableRetainKeyColumnNums.length; + for (int o = 0; o < smallTableRetainKeyColumnNumsLength; o++) { + outputObjects[outputColumnNum++] = + bigTableKeyObjects[testDesc.smallTableRetainKeyColumnNums[o]]; + } + Object[] valueRow = valueList.get(v).getRow(); - final int bigTableRetainColumnNumsLength = testDesc.bigTableRetainColumnNums.length; - final int smallTableRetainValueColumnNumsLength = testDesc.smallTableRetainValueColumnNums.length; + final int smallTableRetainValueColumnNumsLength = + testDesc.smallTableRetainValueColumnNums.length; for (int o = 0; o < smallTableRetainValueColumnNumsLength; o++) { - outputObjects[bigTableRetainColumnNumsLength + o] = valueRow[testDesc.smallTableRetainValueColumnNums[o]]; + outputObjects[outputColumnNum++] = + valueRow[testDesc.smallTableRetainValueColumnNums[o]]; } - addToOutput(testDesc, expectedTestRowMultiSet, outputObjects); + addToOutput(testDesc, expectedTestRowMultiSet, outputObjects, + RowTestObjectsMultiSet.RowFlag.REGULAR); } } break; case INNER_BIG_ONLY: - { - // Value count rows. - final int valueCount = testData.smallTableValueCounts.get(smallTableKeyIndex); - for (int v = 0; v < valueCount; v++) { - Object[] outputObjects = new Object[testDesc.outputColumnNames.length]; - - addBigTableRetained(testDesc, bigTableRowObjects, outputObjects); - addToOutput(testDesc, expectedTestRowMultiSet, outputObjects); - } - } - break; case LEFT_SEMI: { - // One row (existence). Object[] outputObjects = new Object[testDesc.outputColumnNames.length]; addBigTableRetained(testDesc, bigTableRowObjects, outputObjects); - addToOutput(testDesc, expectedTestRowMultiSet, outputObjects); + + int outputColumnNum = testDesc.bigTableRetainColumnNums.length; + + final int smallTableRetainKeyColumnNumsLength = + testDesc.smallTableRetainKeyColumnNums.length; + for (int o = 0; o < smallTableRetainKeyColumnNumsLength; o++) { + outputObjects[outputColumnNum++] = + bigTableKeyObjects[testDesc.smallTableRetainKeyColumnNums[o]]; + } + + addToOutput(testDesc, expectedTestRowMultiSet, outputObjects, + RowTestObjectsMultiSet.RowFlag.REGULAR); } break; default: @@ -458,9 +1614,10 @@ private RowTestObjectsMultiSet createExpectedTestRowMultiSet(MapJoinTestDescript } else { - // No match. + // Big Table non-match. 
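+ // (For FULL OUTER, Small-Table-only non-matches are appended separately after the Big Table rows are processed, below.)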
- if (testDesc.vectorMapJoinVariation == VectorMapJoinVariation.OUTER) { + if (testDesc.vectorMapJoinVariation == VectorMapJoinVariation.OUTER || + testDesc.vectorMapJoinVariation == VectorMapJoinVariation.FULL_OUTER) { // We need to add a non-match row with nulls for small table values. @@ -468,14 +1625,74 @@ private RowTestObjectsMultiSet createExpectedTestRowMultiSet(MapJoinTestDescript addBigTableRetained(testDesc, bigTableRowObjects, outputObjects); - final int bigTableRetainColumnNumsLength = testDesc.bigTableRetainColumnNums.length; - final int smallTableRetainValueColumnNumsLength = testDesc.smallTableRetainValueColumnNums.length; + int outputColumnNum = testDesc.bigTableRetainColumnNums.length; + + final int smallTableRetainKeyColumnNumsLength = + testDesc.smallTableRetainKeyColumnNums.length; + for (int o = 0; o < smallTableRetainKeyColumnNumsLength; o++) { + outputObjects[outputColumnNum++] = null; + } + + final int smallTableRetainValueColumnNumsLength = + testDesc.smallTableRetainValueColumnNums.length; for (int o = 0; o < smallTableRetainValueColumnNumsLength; o++) { - outputObjects[bigTableRetainColumnNumsLength + o] = null; + outputObjects[outputColumnNum++] = null; } - addToOutput(testDesc, expectedTestRowMultiSet, outputObjects); + addToOutput(testDesc, expectedTestRowMultiSet, outputObjects, + RowTestObjectsMultiSet.RowFlag.LEFT_OUTER); + } + } + } + } + + if (testDesc.vectorMapJoinVariation == VectorMapJoinVariation.FULL_OUTER) { + + System.out.println("*BENCHMARK* ----------------------------------------------------------------------"); + System.out.println("*BENCHMARK* FULL OUTER non-match key count " + + testData.fullOuterAdditionalSmallTableKeys.size()); + + // Fill in non-match Small Table key results. + for (RowTestObjects smallTableKey : testData.fullOuterAdditionalSmallTableKeys) { + + // System.out.println( + // "*BENCHMARK* fullOuterAdditionalSmallTableKey " + smallTableKey.toString()); + + int smallTableKeyIndex = testData.smallTableKeyHashMap.get(smallTableKey); + + // One row per value. + ArrayList valueList = testData.smallTableValues.get(smallTableKeyIndex); + final int valueCount = valueList.size(); + for (int v = 0; v < valueCount; v++) { + Object[] outputObjects = new Object[testDesc.outputColumnNames.length]; + + // Non-match Small Table keys produce NULL Big Table columns. + final int bigTableRetainColumnNumsLength = testDesc.bigTableRetainColumnNums.length; + for (int o = 0; o < bigTableRetainColumnNumsLength; o++) { + outputObjects[o] = null; + } + + int outputColumnNum = testDesc.bigTableRetainColumnNums.length; + + // The output result may include 0, 1, or more small key columns... 
+ Object[] smallKeyObjects = smallTableKey.getRow(); + final int smallTableRetainKeyColumnNumsLength = + testDesc.smallTableRetainKeyColumnNums.length; + for (int o = 0; o < smallTableRetainKeyColumnNumsLength; o++) { + outputObjects[outputColumnNum++] = + smallKeyObjects[testDesc.smallTableRetainKeyColumnNums[o]]; + } + + Object[] valueRow = valueList.get(v).getRow(); + final int smallTableRetainValueColumnNumsLength = + testDesc.smallTableRetainValueColumnNums.length; + for (int o = 0; o < smallTableRetainValueColumnNumsLength; o++) { + outputObjects[outputColumnNum++] = + valueRow[testDesc.smallTableRetainValueColumnNums[o]]; } + + addToOutput(testDesc, expectedTestRowMultiSet, outputObjects, + RowTestObjectsMultiSet.RowFlag.FULL_OUTER); } } } @@ -483,67 +1700,354 @@ private RowTestObjectsMultiSet createExpectedTestRowMultiSet(MapJoinTestDescript return expectedTestRowMultiSet; } - private void executeTest(MapJoinTestDescription testDesc, MapJoinTestData testData) throws Exception { + private void generateBigAndSmallTableRowLogLines(MapJoinTestDescription testDesc, + MapJoinTestData testData) throws HiveException { + + // Generate Big Table rows log lines... + VectorExtractRow vectorExtractRow = new VectorExtractRow(); + vectorExtractRow.init(testDesc.bigTableTypeInfos); + + final int bigTableColumnCount = testDesc.bigTableTypeInfos.length; + Object[] bigTableRowObjects = new Object[bigTableColumnCount]; + + /* + PrintStream big_ps; + try { + big_ps = new PrintStream("/Users/mmccline/VecFullOuterRefresh/out_big"); + } catch (Exception e) { + throw new HiveException(e); + } + */ + + VectorRandomBatchSource bigTableBatchSource = testData.getBigTableBatchSource(); + VectorizedRowBatch batch = testData.getBigTableBatch(); + bigTableBatchSource.resetBatchIteration(); + while (bigTableBatchSource.fillNextBatch(batch)) { + + final int size = testData.bigTableBatch.size; + for (int r = 0; r < size; r++) { + vectorExtractRow.extractRow(testData.bigTableBatch, r, bigTableRowObjects); + + // big_ps.println(rowToCsvString(bigTableRowObjects)); + } + } + // big_ps.close(); + + /* + PrintStream small_ps; + try { + small_ps = new PrintStream("/Users/mmccline/VecFullOuterRefresh/out_small"); + } catch (Exception e) { + throw new HiveException(e); + } + */ + + // Generate Small Table rows log lines... 
+ final int keyKeyColumnNumsLength = + testDesc.bigTableKeyColumnNums.length; + final int smallTableRetainValueLength = + testDesc.smallTableRetainValueColumnNums.length; + final int smallTableLength = keyKeyColumnNumsLength + smallTableRetainValueLength; + for (Entry entry : testData.smallTableKeyHashMap.entrySet()) { + if (smallTableRetainValueLength == 0) { + Object[] smallTableRowObjects = entry.getKey().getRow(); + // small_ps.println(rowToCsvString(smallTableRowObjects)); + } else { + Integer valueIndex = entry.getValue(); + ArrayList valueList = testData.smallTableValues.get(valueIndex); + final int valueCount = valueList.size(); + for (int v = 0; v < valueCount; v++) { + Object[] smallTableRowObjects = new Object[smallTableLength]; + System.arraycopy(entry.getKey().getRow(), 0, smallTableRowObjects, 0, keyKeyColumnNumsLength); + int outputColumnNum = keyKeyColumnNumsLength; + Object[] valueRow = valueList.get(v).getRow(); + for (int o = 0; o < smallTableRetainValueLength; o++) { + smallTableRowObjects[outputColumnNum++] = + valueRow[testDesc.smallTableRetainValueColumnNums[o]]; + } + // small_ps.println(rowToCsvString(smallTableRowObjects)); + } + } + } + // small_ps.close(); + } + + private void executeTest(MapJoinTestDescription testDesc, MapJoinTestData testData, + String title) throws Exception { + + // So stack trace is self-explanatory. + switch (testDesc.vectorMapJoinVariation) { + case INNER: + executeTestInner(testDesc, testData, title); + break; + case INNER_BIG_ONLY: + executeTestInnerBigOnly(testDesc, testData, title); + break; + case LEFT_SEMI: + executeTestLeftSemi(testDesc, testData, title); + break; + case OUTER: + executeTestOuter(testDesc, testData, title); + break; + case FULL_OUTER: + executeTestFullOuter(testDesc, testData, title); + break; + default: + throw new RuntimeException("Unexpected Vector MapJoin variation " + + testDesc.vectorMapJoinVariation); + } + } + + private void executeTestInner(MapJoinTestDescription testDesc, MapJoinTestData testData, + String title) throws Exception { + doExecuteTest(testDesc, testData, title); + } + + private void executeTestInnerBigOnly(MapJoinTestDescription testDesc, MapJoinTestData testData, + String title) throws Exception { + doExecuteTest(testDesc, testData, title); + } + + private void executeTestLeftSemi(MapJoinTestDescription testDesc, MapJoinTestData testData, + String title) throws Exception { + doExecuteTest(testDesc, testData, title); + } + + private void executeTestOuter(MapJoinTestDescription testDesc, MapJoinTestData testData, + String title) throws Exception { + doExecuteTest(testDesc, testData, title); + } + + private void executeTestFullOuter(MapJoinTestDescription testDesc, MapJoinTestData testData, + String title) throws Exception { + doExecuteTest(testDesc, testData, title); + } + + private void doExecuteTest(MapJoinTestDescription testDesc, MapJoinTestData testData, + String title) throws Exception { RowTestObjectsMultiSet expectedTestRowMultiSet = createExpectedTestRowMultiSet(testDesc, testData); - // UNDONE: Inner count - System.out.println("*BENCHMARK* expectedTestRowMultiSet rowCount " + expectedTestRowMultiSet.getRowCount() + - " totalCount " + expectedTestRowMultiSet.getTotalCount()); + generateBigAndSmallTableRowLogLines(testDesc, testData); + + System.out.println("*BENCHMARK* expectedTestRowMultiSet " + + " totalKeyCount " + expectedTestRowMultiSet.getTotalKeyCount() + + " totalValueCount " + expectedTestRowMultiSet.getTotalValueCount()); // Execute all implementation variations. 
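+ // Each implementation consumes the same generated Big/Small Table data and is verified against the same expected row multi-set.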
for (MapJoinTestImplementation mapJoinImplementation : MapJoinTestImplementation.values()) { - executeTestImplementation(mapJoinImplementation, testDesc, testData, - expectedTestRowMultiSet); + + if (testDesc.vectorMapJoinVariation == VectorMapJoinVariation.FULL_OUTER && + mapJoinImplementation == MapJoinTestImplementation.ROW_MODE_HASH_MAP) { + + // Key match tracking not supported in plain Java HashMap. + continue; + } + switch (mapJoinImplementation) { + case ROW_MODE_HASH_MAP: + executeRowModeHashMap( + testDesc, testData, + expectedTestRowMultiSet, + title); + break; + case ROW_MODE_OPTIMIZED: + executeRowModeOptimized( + testDesc, testData, + expectedTestRowMultiSet, + title); + break; + case VECTOR_PASS_THROUGH: + executeVectorPassThrough( + testDesc, testData, + expectedTestRowMultiSet, + title); + break; + case NATIVE_VECTOR_OPTIMIZED: + executeNativeVectorOptimized( + testDesc, testData, + expectedTestRowMultiSet, + title); + break; + case NATIVE_VECTOR_FAST: + executeNativeVectorFast( + testDesc, testData, + expectedTestRowMultiSet, + title); + break; + default: + throw new RuntimeException( + "Unexpected vector map join test variation"); + } } } - private boolean isVectorOutput(MapJoinTestImplementation mapJoinImplementation) { - return - (mapJoinImplementation != MapJoinTestImplementation.ROW_MODE_HASH_MAP && - mapJoinImplementation != MapJoinTestImplementation.ROW_MODE_OPTIMIZED); + private void executeRowModeHashMap( + MapJoinTestDescription testDesc, MapJoinTestData testData, + RowTestObjectsMultiSet expectedTestRowMultiSet, + String title) + throws Exception { + executeTestImplementation( + MapJoinTestImplementation.ROW_MODE_HASH_MAP, + testDesc, testData, + expectedTestRowMultiSet, + title); + } + + private void executeRowModeOptimized( + MapJoinTestDescription testDesc, MapJoinTestData testData, + RowTestObjectsMultiSet expectedTestRowMultiSet, + String title) + throws Exception { + executeTestImplementation( + MapJoinTestImplementation.ROW_MODE_OPTIMIZED, + testDesc, testData, + expectedTestRowMultiSet, + title); + } + + private void executeVectorPassThrough( + MapJoinTestDescription testDesc, MapJoinTestData testData, + RowTestObjectsMultiSet expectedTestRowMultiSet, + String title) + throws Exception { + executeTestImplementation( + MapJoinTestImplementation.VECTOR_PASS_THROUGH, + testDesc, testData, + expectedTestRowMultiSet, + title); + } + + private void executeNativeVectorOptimized( + MapJoinTestDescription testDesc, MapJoinTestData testData, + RowTestObjectsMultiSet expectedTestRowMultiSet, + String title) + throws Exception { + executeTestImplementation( + MapJoinTestImplementation.NATIVE_VECTOR_OPTIMIZED, + testDesc, testData, + expectedTestRowMultiSet, + title); + } + + private void executeNativeVectorFast( + MapJoinTestDescription testDesc, MapJoinTestData testData, + RowTestObjectsMultiSet expectedTestRowMultiSet, + String title) + throws Exception { + executeTestImplementation( + MapJoinTestImplementation.NATIVE_VECTOR_FAST, + testDesc, testData, + expectedTestRowMultiSet, + title); } private void executeTestImplementation( MapJoinTestImplementation mapJoinImplementation, - MapJoinTestDescription testDesc, MapJoinTestData testData, RowTestObjectsMultiSet expectedTestRowMultiSet) + MapJoinTestDescription testDesc, MapJoinTestData testData, + RowTestObjectsMultiSet expectedTestRowMultiSet, + String title) throws Exception { - System.out.println("*BENCHMARK* Starting " + mapJoinImplementation + " test"); + System.out.println("*BENCHMARK* Starting 
implementation " + mapJoinImplementation + + " variation " + testDesc.vectorMapJoinVariation + + " title " + title); // UNDONE: Parameterize for implementation variation? MapJoinDesc mapJoinDesc = MapJoinTestConfig.createMapJoinDesc(testDesc); - final boolean isVectorOutput = isVectorOutput(mapJoinImplementation); + final boolean isVectorOutput = MapJoinTestConfig.isVectorOutput(mapJoinImplementation); RowTestObjectsMultiSet outputTestRowMultiSet = new RowTestObjectsMultiSet(); - Operator testCollectorOperator = - (!isVectorOutput ? - new TestMultiSetCollectorOperator( - testDesc.outputObjectInspectors, outputTestRowMultiSet) : - new TestMultiSetVectorCollectorOperator( - testDesc.outputTypeInfos, testDesc.outputObjectInspectors, outputTestRowMultiSet)); - - MapJoinOperator operator = + CreateMapJoinResult result = MapJoinTestConfig.createMapJoinImplementation( - mapJoinImplementation, testDesc, testCollectorOperator, testData, mapJoinDesc); + mapJoinImplementation, testDesc, testData, mapJoinDesc); + MapJoinOperator mapJoinOperator = result.mapJoinOperator; + MapJoinTableContainer mapJoinTableContainer = result.mapJoinTableContainer; + MapJoinTableContainerSerDe mapJoinTableContainerSerDe = result.mapJoinTableContainerSerDe; + + CountCollectorTestOperator testCollectorOperator; + if (!isVectorOutput) { + testCollectorOperator = + new TestMultiSetCollectorOperator( + testDesc.outputObjectInspectors, outputTestRowMultiSet); + } else { + VectorizationContext vOutContext = + ((VectorizationContextRegion) mapJoinOperator).getOutputVectorizationContext(); + testCollectorOperator = + new TestMultiSetVectorCollectorOperator( + ArrayUtils.toPrimitive(vOutContext.getProjectedColumns().toArray(new Integer[0])), + testDesc.outputTypeInfos, testDesc.outputObjectInspectors, outputTestRowMultiSet); + } + + MapJoinTestConfig.connectOperators(mapJoinOperator, testCollectorOperator); + + CountCollectorTestOperator interceptTestCollectorOperator = null; + if (testDesc.vectorMapJoinVariation == VectorMapJoinVariation.FULL_OUTER && + !mapJoinDesc.isDynamicPartitionHashJoin()) { + + if (mapJoinImplementation == MapJoinTestImplementation.ROW_MODE_HASH_MAP) { + + // Not supported. + return; + } + + // Wire in FULL OUTER Intercept. + interceptTestCollectorOperator = + MapJoinTestConfig.addFullOuterIntercept( + mapJoinImplementation, testDesc, outputTestRowMultiSet, testData, + mapJoinOperator, mapJoinTableContainer, mapJoinTableContainerSerDe); + } else { + + // Invoke initializeOp methods. + mapJoinOperator.initialize( + testDesc.hiveConf, testDesc.inputObjectInspectors); + + // Fixup the mapJoinTables. 
+ mapJoinOperator.setTestMapJoinTableContainer( + 1, mapJoinTableContainer, mapJoinTableContainerSerDe); + } if (!isVectorOutput) { - MapJoinTestData.driveBigTableData(testDesc, testData, operator); + MapJoinTestData.driveBigTableData(testDesc, testData, mapJoinOperator); } else { - MapJoinTestData.driveVectorBigTableData(testDesc, testData, operator); + MapJoinTestData.driveVectorBigTableData(testDesc, testData, mapJoinOperator); + } + + if (!testCollectorOperator.getIsClosed()) { + Assert.fail("collector operator not closed"); + } + if (testCollectorOperator.getIsAborted()) { + Assert.fail("collector operator aborted"); + } + if (testDesc.vectorMapJoinVariation == VectorMapJoinVariation.FULL_OUTER && + !mapJoinDesc.isDynamicPartitionHashJoin()) { + if (!interceptTestCollectorOperator.getIsClosed()) { + Assert.fail("intercept collector operator not closed"); + } + if (interceptTestCollectorOperator.getIsAborted()) { + Assert.fail("intercept collector operator aborted"); + } } System.out.println("*BENCHMARK* executeTestImplementation row count " + - ((CountCollectorTestOperator) testCollectorOperator).getRowCount()); + testCollectorOperator.getRowCount()); // Verify the output! - if (!expectedTestRowMultiSet.verify(outputTestRowMultiSet)) { - System.out.println("*BENCHMARK* verify failed for " + mapJoinImplementation); + String option = ""; + if (testDesc.vectorMapJoinVariation == VectorMapJoinVariation.FULL_OUTER) { + option = " mapJoinPlanVariation " + testDesc.mapJoinPlanVariation.name(); + } + if (!expectedTestRowMultiSet.verify(outputTestRowMultiSet, "expected", "actual")) { + System.out.println("*BENCHMARK* " + title + " verify failed" + + " for implementation " + mapJoinImplementation + + " variation " + testDesc.vectorMapJoinVariation + option); + expectedTestRowMultiSet.displayDifferences(outputTestRowMultiSet, "expected", "actual"); } else { - System.out.println("*BENCHMARK* verify succeeded for " + mapJoinImplementation); + System.out.println("*BENCHMARK* " + title + " verify succeeded " + + " for implementation " + mapJoinImplementation + + " variation " + testDesc.vectorMapJoinVariation + option); } } } \ No newline at end of file diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/CheckFastHashTable.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/CheckFastHashTable.java index 09dcb83e7b..d356588a9e 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/CheckFastHashTable.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/CheckFastHashTable.java @@ -30,10 +30,13 @@ import junit.framework.TestCase; import org.apache.hadoop.hive.ql.exec.JoinUtil; +import org.apache.hadoop.hive.ql.exec.persistence.MatchTracker; import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMapResult; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashMultiSetResult; import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashSetResult; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinNonMatchedIterator; +import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.WriteBuffers; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.WritableComparator; @@ -197,25 +200,98 @@ public long getKey(int index) { return array[index].getValues(); } + private void verifyOne(VectorMapJoinFastLongHashMap map, int index, 
MatchTracker matchTracker) { + FastLongHashMapElement element = array[index]; + long longKey = element.getKey(); + List values = element.getValues(); + + VectorMapJoinHashMapResult hashMapResult = map.createHashMapResult(); + JoinUtil.JoinResult joinResult = map.lookup(longKey, hashMapResult, matchTracker); + if (joinResult != JoinUtil.JoinResult.MATCH) { + assertTrue(false); + } + + verifyHashMapValues(hashMapResult, values); + } + public void verify(VectorMapJoinFastLongHashMap map) { int mapSize = map.size(); if (mapSize != count) { TestCase.fail("map.size() does not match expected count"); } + for (int index = 0; index < count; index++) { + verifyOne(map, index, null); + } + } + + private int findKeyInArray(long searchLong) { + + // Brute force search. for (int index = 0; index < count; index++) { FastLongHashMapElement element = array[index]; - long key = element.getKey(); - List values = element.getValues(); + long longKey = element.getKey(); + if (longKey == searchLong) { + return index; + } + } + return -1; + } - VectorMapJoinHashMapResult hashMapResult = map.createHashMapResult(); - JoinUtil.JoinResult joinResult = map.lookup(key, hashMapResult); - if (joinResult != JoinUtil.JoinResult.MATCH) { - assertTrue(false); + // We assume there have been no reads/lookups before this call. + // And, keys are *UNIQUE*. + public void verifyNonMatched(VectorMapJoinFastLongHashMap map, Random random) + throws HiveException { + int mapSize = map.size(); + if (mapSize != count) { + TestCase.fail("map.size() does not match expected count"); + } + + MatchTracker matchTracker = map.createMatchTracker(); + boolean[] nonMatched = new boolean[mapSize]; + int nonMatchedCount = 0; + for (int index = 0; index < count; index++) { + nonMatched[index] = random.nextBoolean(); + if (!nonMatched[index]) { + verifyOne(map, index, matchTracker); + } else { + nonMatchedCount++; } + } + + boolean[] returnedNonMatched = new boolean[mapSize]; + int returnedNonMatchedCount = 0; + + VectorMapJoinNonMatchedIterator nonMatchedIterator = + map.createNonMatchedIterator(matchTracker); + nonMatchedIterator.init(); + while (nonMatchedIterator.findNextNonMatched()) { + boolean isNull = !nonMatchedIterator.readNonMatchedLongKey(); + if (isNull) { + TestCase.fail("NULL key found in expected keys"); + } + long longKey = nonMatchedIterator.getNonMatchedLongKey(); + int index = findKeyInArray(longKey); + if (index == -1) { + TestCase.fail("non-matched key not found in expected keys"); + } + if (!nonMatched[index]) { + TestCase.fail("non-matched key not one of the expected non-matched keys"); + } + if (returnedNonMatched[index]) { + TestCase.fail("non-matched key already returned"); + } + returnedNonMatched[index] = true; + returnedNonMatchedCount++; + VectorMapJoinHashMapResult hashMapResult = nonMatchedIterator.getNonMatchedHashMapResult(); + FastLongHashMapElement element = array[index]; + List values = element.getValues(); verifyHashMapValues(hashMapResult, values); } + if (nonMatchedCount != returnedNonMatchedCount) { + TestCase.fail("non-matched key count mismatch"); + } } } @@ -247,6 +323,11 @@ public int getValueCount() { public void addValue(byte[] value) { values.add(value); } + + @Override + public String toString() { + return "Key length " + key.length + ", value count " + values.size(); + } } /* @@ -310,25 +391,104 @@ public void add(byte[] key, byte[] value) { return array[index].getValues(); } + private void verifyOne(VectorMapJoinFastBytesHashMap map, int index, + MatchTracker matchTracker) { + 
FastBytesHashMapElement element = array[index];
+      byte[] key = element.getKey();
+      List<byte[]> values = element.getValues();
+
+      VectorMapJoinHashMapResult hashMapResult = map.createHashMapResult();
+      JoinUtil.JoinResult joinResult = map.lookup(key, 0, key.length, hashMapResult, matchTracker);
+      if (joinResult != JoinUtil.JoinResult.MATCH) {
+        assertTrue(false);
+      }
+
+      verifyHashMapValues(hashMapResult, values);
+    }
+
     public void verify(VectorMapJoinFastBytesHashMap map) {
       int mapSize = map.size();
       if (mapSize != count) {
         TestCase.fail("map.size() does not match expected count");
       }
+      for (int index = 0; index < count; index++) {
+        verifyOne(map, index, null);
+      }
+    }
+
+    private int findKeyInArray(byte[] searchKeyBytes, int searchKeyOffset, int searchKeyLength) {
+
+      // Brute force search.
       for (int index = 0; index < count; index++) {
         FastBytesHashMapElement element = array[index];
-        byte[] key = element.getKey();
-        List<byte[]> values = element.getValues();
+        byte[] keyBytes = element.getKey();
+        if (keyBytes.length == searchKeyLength &&
+            StringExpr.equal(
+                keyBytes, 0, keyBytes.length,
+                searchKeyBytes, searchKeyOffset, searchKeyLength)) {
+          return index;
+        }
+      }
+      return -1;
+    }

-        VectorMapJoinHashMapResult hashMapResult = map.createHashMapResult();
-        JoinUtil.JoinResult joinResult = map.lookup(key, 0, key.length, hashMapResult);
-        if (joinResult != JoinUtil.JoinResult.MATCH) {
-          assertTrue(false);
+    // We assume there have been no reads/lookups before this call.
+    // And, keys are *UNIQUE*.
+    public void verifyNonMatched(VectorMapJoinFastBytesHashMap map, Random random)
+        throws HiveException {
+      int mapSize = map.size();
+      if (mapSize != count) {
+        TestCase.fail("map.size() does not match expected count");
+      }
+
+      MatchTracker matchTracker = map.createMatchTracker();
+      boolean[] nonMatched = new boolean[mapSize];
+      int nonMatchedCount = 0;
+      for (int index = 0; index < count; index++) {
+        nonMatched[index] = random.nextBoolean();
+        if (!nonMatched[index]) {
+          verifyOne(map, index, matchTracker);
+        } else {
+          nonMatchedCount++;
         }
+      }
+
+      boolean[] returnedNonMatched = new boolean[mapSize];
+      int returnedNonMatchedCount = 0;
+      VectorMapJoinNonMatchedIterator nonMatchedIterator =
+          map.createNonMatchedIterator(matchTracker);
+      nonMatchedIterator.init();
+
+      while (nonMatchedIterator.findNextNonMatched()) {
+        boolean isNull = !nonMatchedIterator.readNonMatchedBytesKey();
+        if (isNull) {
+          TestCase.fail("NULL key found in expected keys");
+        }
+        byte[] keyBytes = nonMatchedIterator.getNonMatchedBytes();
+        int keyOffset = nonMatchedIterator.getNonMatchedBytesOffset();
+        int keyLength = nonMatchedIterator.getNonMatchedBytesLength();
+        int index = findKeyInArray(keyBytes, keyOffset, keyLength);
+        if (index == -1) {
+          TestCase.fail("non-matched key not found in expected keys");
+        }
+        if (!nonMatched[index]) {
+          TestCase.fail("non-matched key not one of the expected non-matched keys");
+        }
+        if (returnedNonMatched[index]) {
+          TestCase.fail("non-matched key already returned");
+        }
+        returnedNonMatched[index] = true;
+        returnedNonMatchedCount++;
+        VectorMapJoinHashMapResult hashMapResult = nonMatchedIterator.getNonMatchedHashMapResult();
+        FastBytesHashMapElement element = array[index];
+        List<byte[]> values = element.getValues();
         verifyHashMapValues(hashMapResult, values);
       }
+      if (nonMatchedCount != returnedNonMatchedCount) {
+        TestCase.fail("non-matched key count mismatch");
+      }
     }
   }
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/TestVectorMapJoinFastBytesHashMapNonMatched.java
ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/TestVectorMapJoinFastBytesHashMapNonMatched.java
new file mode 100644
index 0000000000..586c85048a
--- /dev/null
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/TestVectorMapJoinFastBytesHashMapNonMatched.java
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast;
+
+import java.io.IOException;
+import java.util.Random;
+
+import org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastHashTable.VerifyFastBytesHashMap;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.junit.Test;
+
+/*
+ * A test of the multi-key value hash map optimized for vector map join.
+ *
+ * The key is uninterpreted bytes.
+ */
+public class TestVectorMapJoinFastBytesHashMapNonMatched extends CommonFastHashTable {
+
+  @Test
+  public void testOneKey() throws Exception {
+    random = new Random(82733);
+
+    VectorMapJoinFastMultiKeyHashMap map =
+        new VectorMapJoinFastMultiKeyHashMap(
+            false, CAPACITY, LOAD_FACTOR, WB_SIZE, -1);
+
+    VerifyFastBytesHashMap verifyTable = new VerifyFastBytesHashMap();
+
+    byte[] key = new byte[random.nextInt(MAX_KEY_LENGTH)];
+    random.nextBytes(key);
+    byte[] value = new byte[random.nextInt(MAX_VALUE_LENGTH)];
+    random.nextBytes(value);
+
+    map.testPutRow(key, value);
+    verifyTable.add(key, value);
+
+    // Second value.
+    value = new byte[random.nextInt(MAX_VALUE_LENGTH)];
+    random.nextBytes(value);
+    map.testPutRow(key, value);
+    verifyTable.add(key, value);
+
+    // Third value.
+    value = new byte[random.nextInt(MAX_VALUE_LENGTH)];
+    random.nextBytes(value);
+    map.testPutRow(key, value);
+    verifyTable.add(key, value);
+
+    verifyTable.verifyNonMatched(map, random);
+  }
+
+  @Test
+  public void testMultipleKeysSingleValue() throws Exception {
+    random = new Random(29383);
+
+    VectorMapJoinFastMultiKeyHashMap map =
+        new VectorMapJoinFastMultiKeyHashMap(
+            false, CAPACITY, LOAD_FACTOR, WB_SIZE, -1);
+
+    VerifyFastBytesHashMap verifyTable = new VerifyFastBytesHashMap();
+
+    int keyCount = 100 + random.nextInt(1000);
+    for (int i = 0; i < keyCount; i++) {
+      byte[] key;
+      while (true) {
+        key = new byte[random.nextInt(MAX_KEY_LENGTH)];
+        random.nextBytes(key);
+        if (!verifyTable.contains(key)) {
+          // Unique keys for this test.
+          break;
+        }
+      }
+      byte[] value = new byte[random.nextInt(MAX_VALUE_LENGTH)];
+      random.nextBytes(value);
+
+      map.testPutRow(key, value);
+      verifyTable.add(key, value);
+    }
+
+    verifyTable.verifyNonMatched(map, random);
+  }
+
+  public void addAndVerifyMultipleKeyMultipleValue(int keyCount,
+      VectorMapJoinFastMultiKeyHashMap map, VerifyFastBytesHashMap verifyTable)
+          throws HiveException, IOException {
+    addAndVerifyMultipleKeyMultipleValue(keyCount, map, verifyTable, MAX_KEY_LENGTH, -1);
+  }
+
+  public void addAndVerifyMultipleKeyMultipleValue(int keyCount,
+      VectorMapJoinFastMultiKeyHashMap map, VerifyFastBytesHashMap verifyTable,
+      int maxKeyLength, int fixedValueLength)
+          throws HiveException, IOException {
+    for (int i = 0; i < keyCount; i++) {
+      byte[] value;
+      if (fixedValueLength == -1) {
+        value = new byte[generateLargeCount() - 1];
+      } else {
+        value = new byte[fixedValueLength];
+      }
+      random.nextBytes(value);
+
+      // Add a new key or add a value to an existing key?
+      if (random.nextBoolean() || verifyTable.getCount() == 0) {
+        byte[] key;
+        while (true) {
+          key = new byte[random.nextInt(maxKeyLength)];
+          random.nextBytes(key);
+          if (!verifyTable.contains(key)) {
+            // Unique keys for this test.
+            break;
+          }
+        }
+
+        map.testPutRow(key, value);
+        verifyTable.add(key, value);
+      } else {
+        byte[] randomExistingKey = verifyTable.addRandomExisting(value, random);
+        map.testPutRow(randomExistingKey, value);
+      }
+    }
+
+    verifyTable.verifyNonMatched(map, random);
+  }
+
+  @Test
+  public void testMultipleKeysMultipleValue() throws Exception {
+    random = new Random(9332);
+
+    // Use a large capacity that doesn't require expansion, yet.
+    VectorMapJoinFastMultiKeyHashMap map =
+        new VectorMapJoinFastMultiKeyHashMap(
+            false, LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1);
+
+    VerifyFastBytesHashMap verifyTable = new VerifyFastBytesHashMap();
+
+    int keyCount = 100;
+    addAndVerifyMultipleKeyMultipleValue(keyCount, map, verifyTable);
+  }
+
+  @Test
+  public void testReallyBig() throws Exception {
+    random = new Random(42662);
+
+    // Use a large capacity that doesn't require expansion, yet.
+ VectorMapJoinFastMultiKeyHashMap map = + new VectorMapJoinFastMultiKeyHashMap( + false, LARGE_CAPACITY, LOAD_FACTOR, MODERATE_WB_SIZE, -1); + + VerifyFastBytesHashMap verifyTable = new VerifyFastBytesHashMap(); + + int keyCount = 100; + addAndVerifyMultipleKeyMultipleValue(keyCount, map, verifyTable); + } +} diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/TestVectorMapJoinFastBytesHashSet.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/TestVectorMapJoinFastBytesHashSet.java index cbd77d13d1..6fccde9600 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/TestVectorMapJoinFastBytesHashSet.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/TestVectorMapJoinFastBytesHashSet.java @@ -37,7 +37,7 @@ public void testOneKey() throws Exception { VectorMapJoinFastMultiKeyHashSet map = new VectorMapJoinFastMultiKeyHashSet( - false,CAPACITY, LOAD_FACTOR, WB_SIZE, -1); + false, CAPACITY, LOAD_FACTOR, WB_SIZE, -1); VerifyFastBytesHashSet verifyTable = new VerifyFastBytesHashSet(); @@ -65,7 +65,7 @@ public void testMultipleKeysSingleValue() throws Exception { VectorMapJoinFastMultiKeyHashSet map = new VectorMapJoinFastMultiKeyHashSet( - false,CAPACITY, LOAD_FACTOR, WB_SIZE, -1); + false, CAPACITY, LOAD_FACTOR, WB_SIZE, -1); VerifyFastBytesHashSet verifyTable = new VerifyFastBytesHashSet(); @@ -91,7 +91,7 @@ public void testGetNonExistent() throws Exception { VectorMapJoinFastMultiKeyHashSet map = new VectorMapJoinFastMultiKeyHashSet( - false,CAPACITY, LOAD_FACTOR, WB_SIZE, -1); + false, CAPACITY, LOAD_FACTOR, WB_SIZE, -1); VerifyFastBytesHashSet verifyTable = new VerifyFastBytesHashSet(); @@ -125,7 +125,8 @@ public void testFullMap() throws Exception { // Make sure the map does not expand; should be able to find space. VectorMapJoinFastMultiKeyHashSet map = - new VectorMapJoinFastMultiKeyHashSet(false,CAPACITY, 1f, WB_SIZE, -1); + new VectorMapJoinFastMultiKeyHashSet( + false, CAPACITY, 1f, WB_SIZE, -1); VerifyFastBytesHashSet verifyTable = new VerifyFastBytesHashSet(); @@ -167,7 +168,8 @@ public void testExpand() throws Exception { // Start with capacity 1; make sure we expand on every put. VectorMapJoinFastMultiKeyHashSet map = - new VectorMapJoinFastMultiKeyHashSet(false,1, 0.0000001f, WB_SIZE, -1); + new VectorMapJoinFastMultiKeyHashSet( + false, 1, 0.0000001f, WB_SIZE, -1); VerifyFastBytesHashSet verifyTable = new VerifyFastBytesHashSet(); @@ -227,7 +229,7 @@ public void testMultipleKeysMultipleValue() throws Exception { // Use a large capacity that doesn't require expansion, yet. VectorMapJoinFastMultiKeyHashSet map = new VectorMapJoinFastMultiKeyHashSet( - false,LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1); + false, LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1); VerifyFastBytesHashSet verifyTable = new VerifyFastBytesHashSet(); @@ -242,7 +244,7 @@ public void testLargeAndExpand() throws Exception { // Use a large capacity that doesn't require expansion, yet. 
VectorMapJoinFastMultiKeyHashSet map = new VectorMapJoinFastMultiKeyHashSet( - false,MODERATE_CAPACITY, LOAD_FACTOR, MODERATE_WB_SIZE, -1); + false, MODERATE_CAPACITY, LOAD_FACTOR, MODERATE_WB_SIZE, -1); VerifyFastBytesHashSet verifyTable = new VerifyFastBytesHashSet(); diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/TestVectorMapJoinFastLongHashMapNonMatched.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/TestVectorMapJoinFastLongHashMapNonMatched.java new file mode 100644 index 0000000000..5847787b37 --- /dev/null +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/TestVectorMapJoinFastLongHashMapNonMatched.java @@ -0,0 +1,196 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast; + +import java.io.IOException; +import java.util.Random; + +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastHashTable.VerifyFastLongHashMap; +import org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.VectorMapJoinFastLongHashMap; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableKeyType; +import org.junit.Test; + +import static org.junit.Assert.*; + +public class TestVectorMapJoinFastLongHashMapNonMatched extends CommonFastHashTable { + + @Test + public void testOneKey() throws Exception { + random = new Random(33221); + + VectorMapJoinFastLongHashMap map = + new VectorMapJoinFastLongHashMap( + false, false, HashTableKeyType.LONG, CAPACITY, LOAD_FACTOR, WB_SIZE, -1); + + VerifyFastLongHashMap verifyTable = new VerifyFastLongHashMap(); + + long key = random.nextLong(); + byte[] value = new byte[random.nextInt(MAX_VALUE_LENGTH)]; + random.nextBytes(value); + + map.testPutRow(key, value); + verifyTable.add(key, value); + + // Second value. + value = new byte[random.nextInt(MAX_VALUE_LENGTH)]; + random.nextBytes(value); + map.testPutRow(key, value); + verifyTable.add(key, value); + + // Third value. + value = new byte[random.nextInt(MAX_VALUE_LENGTH)]; + random.nextBytes(value); + map.testPutRow(key, value); + verifyTable.add(key, value); + + verifyTable.verifyNonMatched(map, random); + } + + @Test + public void testMultipleKeysSingleValue() throws Exception { + random = new Random(900); + + VectorMapJoinFastLongHashMap map = + new VectorMapJoinFastLongHashMap( + false, false, HashTableKeyType.LONG, CAPACITY, LOAD_FACTOR, WB_SIZE, -1); + + VerifyFastLongHashMap verifyTable = new VerifyFastLongHashMap(); + + int keyCount = 100 + random.nextInt(1000); + for (int i = 0; i < keyCount; i++) { + long key; + while (true) { + key = random.nextLong(); + if (!verifyTable.contains(key)) { + // Unique keys for this test. 
+ break; + } + } + byte[] value = new byte[random.nextInt(MAX_VALUE_LENGTH)]; + random.nextBytes(value); + + map.testPutRow(key, value); + verifyTable.add(key, value); + } + + verifyTable.verifyNonMatched(map, random); + } + + @Test + public void testExpand() throws Exception { + random = new Random(5227); + + // Start with capacity 1; make sure we expand on every put. + VectorMapJoinFastLongHashMap map = + new VectorMapJoinFastLongHashMap( + false, false, HashTableKeyType.LONG, 1, 0.0000001f, WB_SIZE, -1); + + VerifyFastLongHashMap verifyTable = new VerifyFastLongHashMap(); + + for (int i = 0; i < 18; ++i) { + long key; + while (true) { + key = random.nextLong(); + if (!verifyTable.contains(key)) { + // Unique keys for this test. + break; + } + } + byte[] value = new byte[random.nextInt(MAX_VALUE_LENGTH)]; + random.nextBytes(value); + + map.testPutRow(key, value); + verifyTable.add(key, value); + } + + verifyTable.verifyNonMatched(map, random); + // assertEquals(1 << 18, map.getCapacity()); + } + + public void addAndVerifyMultipleKeyMultipleValue(int keyCount, + VectorMapJoinFastLongHashMap map, VerifyFastLongHashMap verifyTable) + throws HiveException, IOException { + addAndVerifyMultipleKeyMultipleValue(keyCount, map, verifyTable, -1); + } + + public void addAndVerifyMultipleKeyMultipleValue(int keyCount, + VectorMapJoinFastLongHashMap map, VerifyFastLongHashMap verifyTable, int fixedValueLength) + throws HiveException, IOException { + for (int i = 0; i < keyCount; i++) { + byte[] value; + if (fixedValueLength == -1) { + value = new byte[generateLargeCount() - 1]; + } else { + value = new byte[fixedValueLength]; + } + random.nextBytes(value); + + // Add a new key or add a value to an existing key? + if (random.nextBoolean() || verifyTable.getCount() == 0) { + long key; + while (true) { + key = random.nextLong(); + if (!verifyTable.contains(key)) { + // Unique keys for this test. + break; + } + } + + map.testPutRow(key, value); + verifyTable.add(key, value); + } else { + long randomExistingKey = verifyTable.addRandomExisting(value, random); + map.testPutRow(randomExistingKey, value); + } + } + + verifyTable.verifyNonMatched(map, random); + } + + @Test + public void testMultipleKeysMultipleValue() throws Exception { + random = new Random(8); + + // Use a large capacity that doesn't require expansion, yet. + VectorMapJoinFastLongHashMap map = + new VectorMapJoinFastLongHashMap( + false, false, HashTableKeyType.LONG, LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1); + + VerifyFastLongHashMap verifyTable = new VerifyFastLongHashMap(); + + int keyCount = 100; + addAndVerifyMultipleKeyMultipleValue(keyCount, map, verifyTable); + } + + @Test + public void testLargeAndExpand() throws Exception { + random = new Random(20); + + // Use a large capacity that doesn't require expansion, yet. 
+ VectorMapJoinFastLongHashMap map = + new VectorMapJoinFastLongHashMap( + false, false, HashTableKeyType.LONG, MODERATE_CAPACITY, LOAD_FACTOR, MODERATE_WB_SIZE, -1); + + VerifyFastLongHashMap verifyTable = new VerifyFastLongHashMap(); + + int keyCount = 100; + addAndVerifyMultipleKeyMultipleValue(keyCount, map, verifyTable); + } +} diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java index c908f663ee..9615bf30ae 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java @@ -599,8 +599,6 @@ public static Object deserializeReadComplexType(DeserializeRead deserializeRead, return getComplexField(deserializeRead, typeInfo); } - static int fake = 0; - private static Object getComplexField(DeserializeRead deserializeRead, TypeInfo typeInfo) throws IOException { switch (typeInfo.getCategory()) { diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorBatchGenerator.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorBatchGenerator.java index 793a6762de..ff8884172d 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorBatchGenerator.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorBatchGenerator.java @@ -23,8 +23,10 @@ import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType.GenerateCategory; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; @@ -109,15 +111,25 @@ public static GenerateCategory generateCategoryFromPrimitiveCategory(PrimitiveCa } private GenerateCategory category; + private boolean allowNulls; public GenerateType(GenerateCategory category) { this.category = category; } + public GenerateType(GenerateCategory category, boolean allowNulls) { + this.category = category; + this.allowNulls = allowNulls; + } + public GenerateCategory getCategory() { return category; } + public boolean getAllowNulls() { + return allowNulls; + } + /* * BOOLEAN .. LONG: Min and max. 
*/ @@ -180,6 +192,7 @@ public void assignColumnVectors(VectorizedRowBatch batch, int columnNum, case SHORT: case INT: case LONG: + case DATE: colVector = new LongColumnVector(); break; @@ -189,16 +202,22 @@ public void assignColumnVectors(VectorizedRowBatch batch, int columnNum, break; case STRING: + case CHAR: + case VARCHAR: + case BINARY: colVector = new BytesColumnVector(); break; - // UNDONE - case DATE: case TIMESTAMP: - case BINARY: + colVector = new TimestampColumnVector(); + break; + case DECIMAL: - case VARCHAR: - case CHAR: + colVector = new DecimalColumnVector(38, 18); + break; + + // UNDONE + case LIST: case MAP: case STRUCT: diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java index 9bf9d9d169..f200aa26e6 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java @@ -22,21 +22,28 @@ import java.util.Arrays; import java.util.Random; +import org.apache.hadoop.hive.common.type.Date; +import org.apache.hadoop.hive.common.type.HiveChar; +import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.serde2.RandomTypeUtil; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType; import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType.GenerateCategory; +import org.apache.hadoop.hive.serde2.io.DateWritableV2; +import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; public class VectorColumnGroupGenerator { private GenerateType[] generateTypes; private int[] columnNums; private Object[] arrays; + private boolean[][] isNullArrays; public VectorColumnGroupGenerator(int columnNum, GenerateType generateType) { columnNums = new int[] {columnNum}; @@ -59,6 +66,7 @@ public VectorColumnGroupGenerator(int startColumnNum, GenerateType[] generateTyp private void allocateArrays(int size) { arrays = new Object[generateTypes.length]; + isNullArrays = new boolean[generateTypes.length][]; for (int i = 0; i < generateTypes.length; i++) { GenerateType generateType = generateTypes[i]; GenerateCategory category = generateType.getCategory(); @@ -88,24 +96,34 @@ private void allocateArrays(int size) { case STRING: array = new String[size]; break; + case BINARY: + array = new byte[size][]; + break; + case DATE: + array = new Date[size]; + break; case TIMESTAMP: array = new Timestamp[size]; break; - - // UNDONE - case DATE: - case BINARY: - case DECIMAL: - case VARCHAR: case CHAR: + array = new HiveChar[size]; + break; + case VARCHAR: + array = new HiveVarchar[size]; + break; + case DECIMAL: + array = new HiveDecimalWritable[size]; + break; case LIST: case MAP: case STRUCT: case UNION: default: + throw new RuntimeException("Unexpected generate category " + category); } arrays[i] = array; + isNullArrays[i] = new boolean[size]; } } @@ -139,16 +157,24 @@ public void 
clearColumnValueArrays() { case STRING: Arrays.fill(((String[]) array), null); break; + case BINARY: + Arrays.fill(((byte[][]) array), null); + break; + case DATE: + Arrays.fill(((Date[]) array), null); + break; case TIMESTAMP: Arrays.fill(((Timestamp[]) array), null); break; - - // UNDONE - case DATE: - case BINARY: - case DECIMAL: - case VARCHAR: case CHAR: + Arrays.fill(((HiveChar[]) array), null); + break; + case VARCHAR: + Arrays.fill(((HiveVarchar[]) array), null); + break; + case DECIMAL: + Arrays.fill(((HiveDecimalWritable[]) array), null); + break; case LIST: case MAP: @@ -168,6 +194,11 @@ public void generateRowValues(int rowIndex, Random random) { private void generateRowColumnValue(int rowIndex, int columnIndex, Random random) { GenerateType generateType = generateTypes[columnIndex]; GenerateCategory category = generateType.getCategory(); + boolean allowNulls = generateType.getAllowNulls(); + if (allowNulls && random.nextInt(100) < 5) { + isNullArrays[columnIndex][rowIndex] = true; + return; + } Object array = arrays[columnIndex]; switch (category) { case BOOLEAN: @@ -228,6 +259,20 @@ private void generateRowColumnValue(int rowIndex, int columnIndex, Random random } break; + case BINARY: + { + byte[] value = RandomTypeUtil.getRandBinary(random, 10); + ((byte[][]) array)[rowIndex] = value; + } + break; + + case DATE: + { + Date value = RandomTypeUtil.getRandDate(random); + ((Date[]) array)[rowIndex] = value; + } + break; + case TIMESTAMP: { Timestamp value = RandomTypeUtil.getRandTimestamp(random).toSqlTimestamp(); @@ -235,14 +280,31 @@ private void generateRowColumnValue(int rowIndex, int columnIndex, Random random } break; - // UNDONE - case DATE: - // UNDONE: Needed to longTest? + case CHAR: + { + // UNDONE: Use CharTypeInfo.maxLength + HiveChar value = + new HiveChar(RandomTypeUtil.getRandString(random), 10); + ((HiveChar[]) array)[rowIndex] = value; + } + break; - case BINARY: - case DECIMAL: case VARCHAR: - case CHAR: + { + // UNDONE: Use VarcharTypeInfo.maxLength + HiveVarchar value = + new HiveVarchar(RandomTypeUtil.getRandString(random), 10); + ((HiveVarchar[]) array)[rowIndex] = value; + } + break; + + case DECIMAL: + { + HiveDecimalWritable value = + new HiveDecimalWritable(RandomTypeUtil.getRandHiveDecimal(random)); + ((HiveDecimalWritable[]) array)[rowIndex] = value; + } + break; case LIST: case MAP: @@ -261,7 +323,15 @@ public void fillDownRowValues(int rowIndex, int seriesCount, Random random) { private void fillDownRowColumnValue(int rowIndex, int columnIndex, int seriesCount, Random random) { GenerateType generateType = generateTypes[columnIndex]; GenerateCategory category = generateType.getCategory(); + boolean allowNulls = generateType.getAllowNulls(); Object array = arrays[columnIndex]; + boolean[] isNull = isNullArrays[columnIndex]; + if (allowNulls && isNull[rowIndex]) { + for (int i = 1; i < seriesCount; i++) { + isNull[rowIndex + i] = true; + } + return; + } switch (category) { case BOOLEAN: { @@ -335,6 +405,24 @@ private void fillDownRowColumnValue(int rowIndex, int columnIndex, int seriesCou } } break; + case BINARY: + { + byte[][] byteArrayArray = ((byte[][]) array); + byte[] value = byteArrayArray[rowIndex]; + for (int i = 1; i < seriesCount; i++) { + byteArrayArray[rowIndex + i] = value; + } + } + break; + case DATE: + { + Date[] dateArray = ((Date[]) array); + Date value = dateArray[rowIndex]; + for (int i = 1; i < seriesCount; i++) { + dateArray[rowIndex + i] = value; + } + } + break; case TIMESTAMP: { Timestamp[] timestampArray = ((Timestamp[]) 
array); @@ -344,14 +432,33 @@ private void fillDownRowColumnValue(int rowIndex, int columnIndex, int seriesCou } } break; - - // UNDONE - case DATE: - - case BINARY: - case DECIMAL: - case VARCHAR: case CHAR: + { + HiveChar[] hiveCharArray = ((HiveChar[]) array); + HiveChar value = hiveCharArray[rowIndex]; + for (int i = 1; i < seriesCount; i++) { + hiveCharArray[rowIndex + i] = value; + } + } + break; + case VARCHAR: + { + HiveVarchar[] hiveVarcharArray = ((HiveVarchar[]) array); + HiveVarchar value = hiveVarcharArray[rowIndex]; + for (int i = 1; i < seriesCount; i++) { + hiveVarcharArray[rowIndex + i] = value; + } + } + break; + case DECIMAL: + { + HiveDecimalWritable[] hiveDecimalWritableArray = ((HiveDecimalWritable[]) array); + HiveDecimalWritable value = hiveDecimalWritableArray[rowIndex]; + for (int i = 1; i < seriesCount; i++) { + hiveDecimalWritableArray[rowIndex + i] = value; + } + } + break; case LIST: case MAP: @@ -387,6 +494,16 @@ private void populateBatchColumn(VectorizedRowBatch batch, int logicalColumnInde GenerateType generateType = generateTypes[logicalColumnIndex]; GenerateCategory category = generateType.getCategory(); + boolean allowNulls = generateType.getAllowNulls(); + boolean[] isNull = isNullArrays[logicalColumnIndex]; + if (allowNulls) { + for (int i = 0; i < size; i++) { + if (isNull[i]) { + colVector.isNull[i] = true; + colVector.noNulls = false; + } + } + } Object array = arrays[logicalColumnIndex]; switch (category) { case BOOLEAN: @@ -394,7 +511,11 @@ private void populateBatchColumn(VectorizedRowBatch batch, int logicalColumnInde boolean[] booleanArray = ((boolean[]) array); long[] vector = ((LongColumnVector) colVector).vector; for (int i = 0; i < size; i++) { - vector[i] = (booleanArray[i] ? 1 : 0); + if (isNull[i]) { + vector[i] = 0; + } else { + vector[i] = (booleanArray[i] ? 
1 : 0); + } } } break; @@ -403,7 +524,11 @@ private void populateBatchColumn(VectorizedRowBatch batch, int logicalColumnInde byte[] byteArray = ((byte[]) array); long[] vector = ((LongColumnVector) colVector).vector; for (int i = 0; i < size; i++) { - vector[i] = byteArray[i]; + if (isNull[i]) { + vector[i] = 0; + } else { + vector[i] = byteArray[i]; + } } } break; @@ -412,7 +537,11 @@ private void populateBatchColumn(VectorizedRowBatch batch, int logicalColumnInde short[] shortArray = ((short[]) array); long[] vector = ((LongColumnVector) colVector).vector; for (int i = 0; i < size; i++) { - vector[i] = shortArray[i]; + if (isNull[i]) { + vector[i] = 0; + } else { + vector[i] = shortArray[i]; + } } } break; @@ -421,7 +550,11 @@ private void populateBatchColumn(VectorizedRowBatch batch, int logicalColumnInde int[] intArray = ((int[]) array); long[] vector = ((LongColumnVector) colVector).vector; for (int i = 0; i < size; i++) { - vector[i] = intArray[i]; + if (isNull[i]) { + vector[i] = 0; + } else { + vector[i] = intArray[i]; + } } } break; @@ -430,7 +563,11 @@ private void populateBatchColumn(VectorizedRowBatch batch, int logicalColumnInde long[] longArray = ((long[]) array); long[] vector = ((LongColumnVector) colVector).vector; for (int i = 0; i < size; i++) { - vector[i] = longArray[i]; + if (isNull[i]) { + vector[i] = 0; + } else { + vector[i] = longArray[i]; + } } } break; @@ -439,7 +576,11 @@ private void populateBatchColumn(VectorizedRowBatch batch, int logicalColumnInde float[] floatArray = ((float[]) array); double[] vector = ((DoubleColumnVector) colVector).vector; for (int i = 0; i < size; i++) { - vector[i] = floatArray[i]; + if (isNull[i]) { + vector[i] = 0; + } else { + vector[i] = floatArray[i]; + } } } break; @@ -448,7 +589,11 @@ private void populateBatchColumn(VectorizedRowBatch batch, int logicalColumnInde double[] doubleArray = ((double[]) array); double[] vector = ((DoubleColumnVector) colVector).vector; for (int i = 0; i < size; i++) { - vector[i] = doubleArray[i]; + if (isNull[i]) { + vector[i] = 0; + } else { + vector[i] = doubleArray[i]; + } } } break; @@ -457,8 +602,35 @@ private void populateBatchColumn(VectorizedRowBatch batch, int logicalColumnInde String[] stringArray = ((String[]) array); BytesColumnVector bytesColVec = ((BytesColumnVector) colVector); for (int i = 0; i < size; i++) { - byte[] bytes = stringArray[i].getBytes(); - bytesColVec.setVal(i, bytes); + if (!isNull[i]) { + byte[] bytes = stringArray[i].getBytes(); + bytesColVec.setVal(i, bytes); + } + } + } + break; + case BINARY: + { + byte[][] byteArrayArray = ((byte[][]) array); + BytesColumnVector bytesColVec = ((BytesColumnVector) colVector); + for (int i = 0; i < size; i++) { + if (!isNull[i]) { + byte[] bytes = byteArrayArray[i]; + bytesColVec.setVal(i, bytes); + } + } + } + break; + case DATE: + { + Date[] dateArray = ((Date[]) array); + LongColumnVector longColVec = ((LongColumnVector) colVector); + for (int i = 0; i < size; i++) { + if (!isNull[i]) { + Date date = dateArray[i]; + longColVec.vector[i] = + DateWritableV2.dateToDays(date); + } } } break; @@ -467,26 +639,58 @@ private void populateBatchColumn(VectorizedRowBatch batch, int logicalColumnInde Timestamp[] timestampArray = ((Timestamp[]) array); TimestampColumnVector timestampColVec = ((TimestampColumnVector) colVector); for (int i = 0; i < size; i++) { - Timestamp timestamp = timestampArray[i]; - timestampColVec.set(i, timestamp); + if (!isNull[i]) { + Timestamp timestamp = timestampArray[i]; + timestampColVec.set(i, timestamp); + 
            }
+          }
+        }
+        break;
+      case CHAR:
+        {
+          HiveChar[] hiveCharArray = ((HiveChar[]) array);
+          BytesColumnVector bytesColVec = ((BytesColumnVector) colVector);
+          for (int i = 0; i < size; i++) {
+            if (!isNull[i]) {
+              byte[] bytes = hiveCharArray[i].getValue().getBytes();
+              bytesColVec.setVal(i, bytes);
+            }
+          }
+        }
+        break;
+      case VARCHAR:
+        {
+          HiveVarchar[] hiveVarcharArray = ((HiveVarchar[]) array);
+          BytesColumnVector bytesColVec = ((BytesColumnVector) colVector);
+          for (int i = 0; i < size; i++) {
+            if (!isNull[i]) {
+              byte[] bytes = hiveVarcharArray[i].getValue().getBytes();
+              bytesColVec.setVal(i, bytes);
+            }
+          }
+        }
+        break;
+      case DECIMAL:
+        {
+          HiveDecimalWritable[] hiveDecimalWritableArray = ((HiveDecimalWritable[]) array);
+          DecimalColumnVector decimalColVec = ((DecimalColumnVector) colVector);
+          for (int i = 0; i < size; i++) {
+            if (!isNull[i]) {
+              HiveDecimalWritable decWritable = hiveDecimalWritableArray[i];
+              decimalColVec.set(i, decWritable);
+            }
          }
        }
        break;

      // UNDONE
-      case DATE:
-
-      case BINARY:
-      case DECIMAL:
-      case VARCHAR:
-      case CHAR:
-
      case LIST:
      case MAP:
      case STRUCT:
      case UNION:
      default:
+        throw new RuntimeException("Unexpected generate category " + category);
    }
  }
}
\ No newline at end of file
diff --git ql/src/test/org/apache/hadoop/hive/ql/plan/mapping/TestCounterMapping.java ql/src/test/org/apache/hadoop/hive/ql/plan/mapping/TestCounterMapping.java
index b705fd7f88..fdc870128a 100644
--- ql/src/test/org/apache/hadoop/hive/ql/plan/mapping/TestCounterMapping.java
+++ ql/src/test/org/apache/hadoop/hive/ql/plan/mapping/TestCounterMapping.java
@@ -146,7 +146,7 @@ public void testUsageOfRuntimeInfo() throws ParseException {
     FilterOperator filter2 = filters2.get(0);

     assertEquals("original check", 7, filter1.getStatistics().getNumRows());
-    assertEquals("optimized check", 6, filter2.getStatistics().getNumRows());
+    assertEquals("optimized check", 1, filter2.getStatistics().getNumRows());

 }

diff --git ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDAFEvaluator.java ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDAFEvaluator.java
index 0747fa15d0..d3df170395 100644
--- ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDAFEvaluator.java
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDAFEvaluator.java
@@ -18,6 +18,7 @@

 package org.apache.hadoop.hive.ql.udf.generic;

+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.PTFPartition;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef;
@@ -55,10 +56,10 @@
   @Test
   public void testGetPartitionWindowingEvaluatorWithoutInitCall() {
     BasePartitionEvaluator partition1Evaluator1 = udafEvaluator.getPartitionWindowingEvaluator(
-        winFrame, partition1, parameters, outputOI);
+        winFrame, partition1, parameters, outputOI, false);

     BasePartitionEvaluator partition1Evaluator2 = udafEvaluator.getPartitionWindowingEvaluator(
-        winFrame, partition1, parameters, outputOI);
+        winFrame, partition1, parameters, outputOI, false);

     Assert.assertEquals(partition1Evaluator1, partition1Evaluator2);
   }
@@ -66,12 +67,12 @@ public void testGetPartitionWindowingEvaluatorWithoutInitCall() {
   @Test
   public void testGetPartitionWindowingEvaluatorWithInitCall() throws HiveException {
     BasePartitionEvaluator partition1Evaluator1 = udafEvaluator.getPartitionWindowingEvaluator(
-        winFrame, partition1, parameters, outputOI);
+        winFrame, partition1, parameters, outputOI, false);

     udafEvaluator.init(GenericUDAFEvaluator.Mode.COMPLETE,
null); BasePartitionEvaluator newPartitionEvaluator = udafEvaluator.getPartitionWindowingEvaluator( - winFrame, partition1, parameters, outputOI); + winFrame, partition1, parameters, outputOI, false); Assert.assertNotEquals(partition1Evaluator1, newPartitionEvaluator); } diff --git ql/src/test/queries/clientnegative/join32.q ql/src/test/queries/clientnegative/join32.q index 8c9391451b..3b6babb4d3 100644 --- ql/src/test/queries/clientnegative/join32.q +++ ql/src/test/queries/clientnegative/join32.q @@ -2,6 +2,9 @@ --! qt:dataset:src1 --! qt:dataset:src set hive.cbo.enable=false; + +-- SORT_QUERY_RESULTS + CREATE TABLE dest_j1(key STRING, value STRING, val2 STRING) STORED AS TEXTFILE; -- Mapjoin followed by Mapjoin is not supported. diff --git ql/src/test/queries/clientpositive/acid_stats2.q ql/src/test/queries/clientpositive/acid_stats2.q index cf96731985..8c40fa78a2 100644 --- ql/src/test/queries/clientpositive/acid_stats2.q +++ ql/src/test/queries/clientpositive/acid_stats2.q @@ -16,6 +16,8 @@ set hive.support.concurrency=true; set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager; set hive.query.results.cache.enabled=false; +set hive.exec.dynamic.partition.mode=nonstrict; + create table stats3(key int,value string) stored as orc tblproperties ("transactional"="true"); insert into table stats3 values (1, "foo"); explain select count(*) from stats3; @@ -39,4 +41,29 @@ delete from stats3 where key = 2; explain select count(*) from stats3; select count(*) from stats3; -drop table stats3; \ No newline at end of file +drop table stats3; + +create table stats4(key int,value string) partitioned by (ds string) clustered by (value) into 2 buckets stored as orc tblproperties ("transactional"="true"); +insert into table stats4 partition (ds) values (12341234, 'bob', 'today'),(123471234871239847, 'bob', 'today'),(431, 'tracy', 'tomorrow'); +desc formatted stats4; +desc formatted stats4 partition(ds='tomorrow'); +desc formatted stats4 partition(ds='today'); +explain select count(*) from stats4; +select count(*) from stats4; +delete from stats4 where value = 'tracy' and ds = 'tomorrow'; +desc formatted stats4; +desc formatted stats4 partition(ds='tomorrow'); +desc formatted stats4 partition(ds='today'); +explain select count(*) from stats4; +select count(*) from stats4; +explain select count(*) from stats4 where ds = 'tomorrow'; +select count(*) from stats4 where ds = 'tomorrow'; +delete from stats4 where key > 12341234 and ds = 'today'; +desc formatted stats4; +desc formatted stats4 partition(ds='tomorrow'); +desc formatted stats4 partition(ds='today'); +explain select count(*) from stats4; +select count(*) from stats4; +explain select count(*) from stats4 where ds = 'tomorrow'; +select count(*) from stats4 where ds = 'tomorrow'; +drop table stats4; diff --git ql/src/test/queries/clientpositive/auto_join_filters.q ql/src/test/queries/clientpositive/auto_join_filters.q index ea028f61c2..9282f8ffb3 100644 --- ql/src/test/queries/clientpositive/auto_join_filters.q +++ ql/src/test/queries/clientpositive/auto_join_filters.q @@ -7,7 +7,13 @@ LOAD DATA LOCAL INPATH '../../data/files/in3.txt' INTO TABLE myinput1_n5; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a LEFT OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; 
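+-- The FULL OUTER joins below are each run twice: once with hive.mapjoin.full.outer=false
+-- (the reduce-side join plan) and once with it enabled (and n-way join merging turned off);
+-- both plans must return the same checksum.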
SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a RIGHT OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; +SET hive.mapjoin.full.outer=false; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; +SET hive.mapjoin.full.outer=true; +SET hive.merge.nway.joins=false; +EXPLAIN SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; +SET hive.merge.nway.joins=true; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a JOIN myinput1_n5 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; @@ -24,10 +30,19 @@ SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a RIGHT OUTER JOI SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a RIGHT OUTER JOIN myinput1_n5 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a RIGHT OUTER JOIN myinput1_n5 b ON a.key=b.key and a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; +SET hive.mapjoin.full.outer=false; +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value and a.key=b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; +SET hive.mapjoin.full.outer=true; +SET hive.merge.nway.joins=false; +EXPLAIN SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND 
b.value > 50 AND b.key = b.value; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value and a.key=b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; +SET hive.merge.nway.joins=true; SELECT sum(hash(a.key,a.value,b.key,b.value)) from myinput1_n5 a LEFT OUTER JOIN myinput1_n5 b ON (a.value=b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value) RIGHT OUTER JOIN myinput1_n5 c ON (b.value=c.value AND c.key > 40 AND c.value > 50 AND c.key = c.value AND b.key > 40 AND b.value > 50 AND b.key = b.value); SELECT sum(hash(a.key,a.value,b.key,b.value)) from myinput1_n5 a RIGHT OUTER JOIN myinput1_n5 b ON (a.value=b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value) LEFT OUTER JOIN myinput1_n5 c ON (b.value=c.value AND c.key > 40 AND c.value > 50 AND c.key = c.value AND b.key > 40 AND b.value > 50 AND b.key = b.value); @@ -51,7 +66,12 @@ SET hive.input.format = org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a LEFT OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a RIGHT OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; +SET hive.mapjoin.full.outer=false; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; +SET hive.mapjoin.full.outer=true; +SET hive.merge.nway.joins=false; +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; +SET hive.merge.nway.joins=true; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a JOIN myinput1_n5 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; @@ -68,10 +88,19 @@ SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a RIGHT OUTER JOI SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a RIGHT OUTER JOIN myinput1_n5 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a RIGHT OUTER JOIN myinput1_n5 b ON a.key=b.key and a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; +SET hive.mapjoin.full.outer=false; +SELECT 
sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value and a.key=b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; +SET hive.mapjoin.full.outer=true; +SET hive.merge.nway.joins=false; +EXPLAIN SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value and a.key=b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value; +SET hive.merge.nway.joins=true; SELECT sum(hash(a.key,a.value,b.key,b.value)) from myinput1_n5 a LEFT OUTER JOIN myinput1_n5 b ON (a.value=b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value) RIGHT OUTER JOIN myinput1_n5 c ON (b.value=c.value AND c.key > 40 AND c.value > 50 AND c.key = c.value AND b.key > 40 AND b.value > 50 AND b.key = b.value); SELECT sum(hash(a.key,a.value,b.key,b.value)) from myinput1_n5 a RIGHT OUTER JOIN myinput1_n5 b ON (a.value=b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value) LEFT OUTER JOIN myinput1_n5 c ON (b.value=c.value AND c.key > 40 AND c.value > 50 AND c.key = c.value AND b.key > 40 AND b.value > 50 AND b.key = b.value); diff --git ql/src/test/queries/clientpositive/auto_join_nulls.q ql/src/test/queries/clientpositive/auto_join_nulls.q index 4a2b57b657..a3851288e4 100644 --- ql/src/test/queries/clientpositive/auto_join_nulls.q +++ ql/src/test/queries/clientpositive/auto_join_nulls.q @@ -19,10 +19,19 @@ SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a RIGHT OUTER JOI SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a RIGHT OUTER JOIN myinput1_n2 b ON a.key = b.key; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a RIGHT OUTER JOIN myinput1_n2 b ON a.value = b.value; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a RIGHT OUTER JOIN 
myinput1_n2 b ON a.key=b.key and a.value = b.value; +SET hive.mapjoin.full.outer=false; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.key = b.value; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.key = b.key; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.value = b.value; SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.value = b.value and a.key=b.key; +SET hive.mapjoin.full.outer=true; +SET hive.merge.nway.joins=false; +EXPLAIN SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.key = b.value; +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.key = b.value; +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.key = b.key; +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.value = b.value; +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.value = b.value and a.key=b.key; +SET hive.merge.nway.joins=true; SELECT sum(hash(a.key,a.value,b.key,b.value)) from myinput1_n2 a LEFT OUTER JOIN myinput1_n2 b ON (a.value=b.value) RIGHT OUTER JOIN myinput1_n2 c ON (b.value=c.value); SELECT sum(hash(a.key,a.value,b.key,b.value)) from myinput1_n2 a RIGHT OUTER JOIN myinput1_n2 b ON (a.value=b.value) LEFT OUTER JOIN myinput1_n2 c ON (b.value=c.value); diff --git ql/src/test/queries/clientpositive/bucket_map_join_tez1.q ql/src/test/queries/clientpositive/bucket_map_join_tez1.q index 82480351cc..049a4d9571 100644 --- ql/src/test/queries/clientpositive/bucket_map_join_tez1.q +++ ql/src/test/queries/clientpositive/bucket_map_join_tez1.q @@ -18,7 +18,7 @@ load data local inpath '../../data/files/bmj/000001_0' INTO TABLE srcbucket_mapj load data local inpath '../../data/files/bmj/000002_0' INTO TABLE srcbucket_mapjoin_part_n15 partition(ds='2008-04-08'); load data local inpath '../../data/files/bmj/000003_0' INTO TABLE srcbucket_mapjoin_part_n15 partition(ds='2008-04-08'); - +-- SORT_QUERY_RESULTS set hive.optimize.bucketingsorting=false; insert overwrite table tab_part_n9 partition (ds='2008-04-08') diff --git ql/src/test/queries/clientpositive/correlationoptimizer1.q ql/src/test/queries/clientpositive/correlationoptimizer1.q index 1c4f82aedf..d61d175606 100644 --- ql/src/test/queries/clientpositive/correlationoptimizer1.q +++ ql/src/test/queries/clientpositive/correlationoptimizer1.q @@ -216,6 +216,7 @@ set hive.optimize.correlation=false; -- they share the same key. Because those keys with a null value are not grouped -- in the output of the Full Outer Join, we cannot use a single MR to execute -- these two operators. 
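[Editor's note] The comment above turns on a subtlety of FULL OUTER JOIN semantics worth spelling out. A minimal sketch with toy tables (names invented, not part of this patch): NULL join keys never match, so each NULL-keyed row from either input survives as its own unmatched output row, and a downstream GROUP BY on that key still needs its own shuffle.

-- Hypothetical illustration: two single-column tables, each holding rows (1) and (NULL).
SELECT x.key, y.key
FROM tiny_x x FULL OUTER JOIN tiny_y y ON (x.key = y.key);
-- Produces three rows, not two:
--   1     1      matched pair
--   NULL  NULL   unmatched tiny_x row with NULL key
--   NULL  NULL   unmatched tiny_y row with NULL key
-- The two NULL-keyed rows are not co-located by the join's shuffle,
-- so a GROUP BY on the key cannot reuse it; hence the extra MR job.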
+SET hive.mapjoin.full.outer=false; EXPLAIN SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT x.key AS key, count(1) AS cnt @@ -227,7 +228,35 @@ FROM (SELECT x.key AS key, count(1) AS cnt FROM src1 x FULL OUTER JOIN src y ON (x.key = y.key) GROUP BY x.key) tmp; +SET hive.mapjoin.full.outer=true; +SET hive.merge.nway.joins=false; +EXPLAIN +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT x.key AS key, count(1) AS cnt + FROM src1 x FULL OUTER JOIN src y ON (x.key = y.key) + GROUP BY x.key) tmp; + +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT x.key AS key, count(1) AS cnt + FROM src1 x FULL OUTER JOIN src y ON (x.key = y.key) + GROUP BY x.key) tmp; +SET hive.merge.nway.joins=true; + set hive.optimize.correlation=true; +SET hive.mapjoin.full.outer=false; +EXPLAIN +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT x.key AS key, count(1) AS cnt + FROM src1 x FULL OUTER JOIN src y ON (x.key = y.key) + GROUP BY x.key) tmp; + +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT x.key AS key, count(1) AS cnt + FROM src1 x FULL OUTER JOIN src y ON (x.key = y.key) + GROUP BY x.key) tmp; + +SET hive.mapjoin.full.outer=true; +SET hive.merge.nway.joins=false; EXPLAIN SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT x.key AS key, count(1) AS cnt @@ -238,6 +267,7 @@ SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT x.key AS key, count(1) AS cnt FROM src1 x FULL OUTER JOIN src y ON (x.key = y.key) GROUP BY x.key) tmp; +SET hive.merge.nway.joins=true; set hive.auto.convert.join=false; set hive.optimize.correlation=false; diff --git ql/src/test/queries/clientpositive/correlationoptimizer2.q ql/src/test/queries/clientpositive/correlationoptimizer2.q index 66560841b4..cbb6e47ee8 100644 --- ql/src/test/queries/clientpositive/correlationoptimizer2.q +++ ql/src/test/queries/clientpositive/correlationoptimizer2.q @@ -96,6 +96,7 @@ FROM (SELECT a.key AS key1, a.cnt AS cnt1, b.key AS key2, b.cnt AS cnt2 set hive.optimize.correlation=false; -- Full Outer Join should be handled. 
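[Editor's note] This and the neighboring test files repeat one toggle pattern many times; a condensed sketch of that pattern (query bodies elided with "...") may help when skimming the hunks below:

SET hive.mapjoin.full.outer=false;   -- baseline: FULL OUTER JOIN planned as a shuffle join
EXPLAIN SELECT ... FULL OUTER JOIN ... ;
SELECT ... FULL OUTER JOIN ... ;

SET hive.mapjoin.full.outer=true;    -- exercise the new FULL OUTER MapJoin path
SET hive.merge.nway.joins=false;     -- keep adjacent joins un-merged so the conversion can apply
EXPLAIN SELECT ... FULL OUTER JOIN ... ;
SELECT ... FULL OUTER JOIN ... ;
SET hive.merge.nway.joins=true;      -- restore the default for the queries that follow

The paired SELECTs are expected to return identical results under both settings; only the plans differ.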
+SET hive.mapjoin.full.outer=false; EXPLAIN SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) FROM (SELECT a.key AS key1, a.cnt AS cnt1, b.key AS key2, b.cnt AS cnt2 @@ -109,7 +110,39 @@ FROM (SELECT a.key AS key1, a.cnt AS cnt1, b.key AS key2, b.cnt AS cnt2 FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b ON (a.key = b.key)) tmp; +SET hive.mapjoin.full.outer=true; +SET hive.merge.nway.joins=false; +EXPLAIN +SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.cnt AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key)) tmp; + +SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.cnt AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key)) tmp; +SET hive.merge.nway.joins=true; + set hive.optimize.correlation=true; +SET hive.mapjoin.full.outer=false; +EXPLAIN +SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.cnt AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key)) tmp; + +SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.cnt AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key)) tmp; + +SET hive.mapjoin.full.outer=true; +SET hive.merge.nway.joins=false; EXPLAIN SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) FROM (SELECT a.key AS key1, a.cnt AS cnt1, b.key AS key2, b.cnt AS cnt2 @@ -122,8 +155,28 @@ FROM (SELECT a.key AS key1, a.cnt AS cnt1, b.key AS key2, b.cnt AS cnt2 FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b ON (a.key = b.key)) tmp; +SET hive.merge.nway.joins=true; set hive.optimize.correlation=false; + +SET hive.mapjoin.full.outer=false; +EXPLAIN +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT a.key AS key, count(1) AS cnt + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key) + GROUP BY a.key) tmp; + +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT a.key AS key, count(1) AS cnt + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key) + GROUP BY a.key) tmp; + +SET hive.mapjoin.full.outer=true; +SET hive.merge.nway.joins=true; EXPLAIN SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT a.key AS key, count(1) AS cnt @@ -138,11 +191,13 @@ FROM (SELECT a.key AS key, count(1) AS cnt FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b ON 
(a.key = b.key) GROUP BY a.key) tmp; +SET hive.merge.nway.joins=false; set hive.optimize.correlation=true; -- After FULL OUTER JOIN, keys with null values are not grouped. Right now, -- we have to generate 2 MR jobs for tmp, 1 MR job for a join b and another for the -- GroupByOperator on key. +SET hive.mapjoin.full.outer=false; EXPLAIN SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT a.key AS key, count(1) AS cnt @@ -158,11 +213,30 @@ FROM (SELECT a.key AS key, count(1) AS cnt ON (a.key = b.key) GROUP BY a.key) tmp; +SET hive.mapjoin.full.outer=true; +SET hive.merge.nway.joins=false; +EXPLAIN +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT a.key AS key, count(1) AS cnt + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key) + GROUP BY a.key) tmp; + +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT a.key AS key, count(1) AS cnt + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key) + GROUP BY a.key) tmp; +SET hive.merge.nway.joins=true; + set hive.optimize.correlation=false; -- When Correlation Optimizer is turned off, we need 4 MR jobs. -- When Correlation Optimizer is turned on, the subquery of tmp will be evaluated in -- a single MR job (including the subquery a, the subquery b, and a join b). So, we -- will have 2 MR jobs. +SET hive.mapjoin.full.outer=false; EXPLAIN SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) FROM (SELECT a.key AS key1, a.val AS cnt1, b.key AS key2, b.cnt AS cnt2 @@ -176,7 +250,39 @@ FROM (SELECT a.key AS key1, a.val AS cnt1, b.key AS key2, b.cnt AS cnt2 JOIN (SELECT z.key AS key, count(z.value) AS cnt FROM src1 z group by z.key) b ON (a.key = b.key)) tmp; +SET hive.mapjoin.full.outer=true; +SET hive.merge.nway.joins=false; +EXPLAIN +SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.val AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key AS key, x.value AS val FROM src1 x JOIN src y ON (x.key = y.key)) a + JOIN (SELECT z.key AS key, count(z.value) AS cnt FROM src1 z group by z.key) b + ON (a.key = b.key)) tmp; + +SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.val AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key AS key, x.value AS val FROM src1 x JOIN src y ON (x.key = y.key)) a + JOIN (SELECT z.key AS key, count(z.value) AS cnt FROM src1 z group by z.key) b + ON (a.key = b.key)) tmp; +SET hive.merge.nway.joins=true; + set hive.optimize.correlation=true; +SET hive.mapjoin.full.outer=false; +EXPLAIN +SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.val AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key AS key, x.value AS val FROM src1 x JOIN src y ON (x.key = y.key)) a + JOIN (SELECT z.key AS key, count(z.value) AS cnt FROM src1 z group by z.key) b + ON (a.key = b.key)) tmp; + +SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.val AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key AS key, x.value AS val FROM src1 x JOIN src y ON (x.key = y.key)) a + JOIN (SELECT z.key AS key, count(z.value) AS cnt FROM src1 z group by z.key) b + ON (a.key = b.key)) tmp; + +SET
hive.mapjoin.full.outer=true; +SET hive.merge.nway.joins=false; EXPLAIN SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) FROM (SELECT a.key AS key1, a.val AS cnt1, b.key AS key2, b.cnt AS cnt2 @@ -189,3 +295,4 @@ FROM (SELECT a.key AS key1, a.val AS cnt1, b.key AS key2, b.cnt AS cnt2 FROM (SELECT x.key AS key, x.value AS val FROM src1 x JOIN src y ON (x.key = y.key)) a JOIN (SELECT z.key AS key, count(z.value) AS cnt FROM src1 z group by z.key) b ON (a.key = b.key)) tmp; +SET hive.merge.nway.joins=true; diff --git ql/src/test/queries/clientpositive/correlationoptimizer4.q ql/src/test/queries/clientpositive/correlationoptimizer4.q index c34ff237e6..ee95abfe84 100644 --- ql/src/test/queries/clientpositive/correlationoptimizer4.q +++ ql/src/test/queries/clientpositive/correlationoptimizer4.q @@ -1,4 +1,5 @@ set hive.mapred.mode=nonstrict; + CREATE TABLE T1_n146(key INT, val STRING); LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1_n146; CREATE TABLE T2_n86(key INT, val STRING); @@ -24,7 +25,7 @@ FROM (SELECT y.key AS key, count(1) AS cnt GROUP BY y.key) tmp; set hive.optimize.correlation=true; -EXPLAIN +EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT y.key AS key, count(1) AS cnt FROM T2_n86 x JOIN T1_n146 y ON (x.key = y.key) JOIN T3_n34 z ON (y.key = z.key) @@ -38,7 +39,7 @@ FROM (SELECT y.key AS key, count(1) AS cnt set hive.optimize.correlation=true; set hive.auto.convert.join=true; -- Enable hive.auto.convert.join. -EXPLAIN +EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT y.key AS key, count(1) AS cnt FROM T2_n86 x JOIN T1_n146 y ON (x.key = y.key) JOIN T3_n34 z ON (y.key = z.key) @@ -53,7 +54,7 @@ set hive.auto.convert.join=false; set hive.optimize.correlation=false; -- This case should be optimized, since the key of GroupByOperator is from the leftmost table -- of a chain of LEFT OUTER JOINs. -EXPLAIN +EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT x.key AS key, count(1) AS cnt FROM T2_n86 x LEFT OUTER JOIN T1_n146 y ON (x.key = y.key) LEFT OUTER JOIN T3_n34 z ON (y.key = z.key) @@ -65,7 +66,7 @@ FROM (SELECT x.key AS key, count(1) AS cnt GROUP BY x.key) tmp; set hive.optimize.correlation=true; -EXPLAIN +EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT x.key AS key, count(1) AS cnt FROM T2_n86 x LEFT OUTER JOIN T1_n146 y ON (x.key = y.key) LEFT OUTER JOIN T3_n34 z ON (y.key = z.key) @@ -80,7 +81,7 @@ set hive.optimize.correlation=true; -- This query will not be optimized by correlation optimizer because -- GroupByOperator uses y.key (a right table of a left outer join) -- as the key. -EXPLAIN +EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT y.key AS key, count(1) AS cnt FROM T2_n86 x LEFT OUTER JOIN T1_n146 y ON (x.key = y.key) LEFT OUTER JOIN T3_n34 z ON (y.key = z.key) @@ -94,7 +95,7 @@ FROM (SELECT y.key AS key, count(1) AS cnt set hive.optimize.correlation=false; -- This case should be optimized, since the key of GroupByOperator is from the rightmost table -- of a chain of RIGHT OUTER JOINs. 
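[Editor's note] The leftmost/rightmost-table remarks above are the crux of these EXPLAIN VECTORIZATION cases. A hedged illustration with toy tables t1, t2, t3 (names invented, not from the patch): in a RIGHT OUTER JOIN chain the rightmost input is the preserved side, so its key reaches the output unchanged and a GROUP BY on it can share the join's data distribution; a key from an earlier input may be NULL-padded and cannot.

-- Optimizable: the grouping key comes from the preserved, rightmost input z.
SELECT z.key, count(1)
FROM t2 x RIGHT OUTER JOIN t1 y ON (x.key = y.key)
          RIGHT OUTER JOIN t3 z ON (y.key = z.key)
GROUP BY z.key;

-- Not optimizable: y.key is NULL-padded for z rows without a match,
-- so it no longer identifies how the join output was distributed.
SELECT y.key, count(1)
FROM t2 x RIGHT OUTER JOIN t1 y ON (x.key = y.key)
          RIGHT OUTER JOIN t3 z ON (y.key = z.key)
GROUP BY y.key;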
-EXPLAIN +EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT z.key AS key, count(1) AS cnt FROM T2_n86 x RIGHT OUTER JOIN T1_n146 y ON (x.key = y.key) RIGHT OUTER JOIN T3_n34 z ON (y.key = z.key) @@ -106,7 +107,7 @@ FROM (SELECT z.key AS key, count(1) AS cnt GROUP BY z.key) tmp; set hive.optimize.correlation=true; -EXPLAIN +EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT z.key AS key, count(1) AS cnt FROM T2_n86 x RIGHT OUTER JOIN T1_n146 y ON (x.key = y.key) RIGHT OUTER JOIN T3_n34 z ON (y.key = z.key) @@ -121,7 +122,7 @@ set hive.optimize.correlation=true; -- This query will not be optimized by correlation optimizer because -- GroupByOperator uses y.key (a left table of a right outer join) -- as the key. -EXPLAIN +EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT y.key AS key, count(1) AS cnt FROM T2_n86 x RIGHT OUTER JOIN T1_n146 y ON (x.key = y.key) RIGHT OUTER JOIN T3_n34 z ON (y.key = z.key) @@ -135,7 +136,20 @@ FROM (SELECT y.key AS key, count(1) AS cnt set hive.optimize.correlation=false; -- This case should not be optimized because after the FULL OUTER JOIN, rows with null keys -- are not grouped. -EXPLAIN +set hive.auto.convert.join=false; +EXPLAIN VECTORIZATION +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT y.key AS key, count(1) AS cnt + FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER JOIN T3_n34 z ON (y.key = z.key) + GROUP BY y.key) tmp; + +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT y.key AS key, count(1) AS cnt + FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER JOIN T3_n34 z ON (y.key = z.key) + GROUP BY y.key) tmp; + +set hive.auto.convert.join=true; +EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT y.key AS key, count(1) AS cnt FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER JOIN T3_n34 z ON (y.key = z.key) @@ -147,7 +161,8 @@ FROM (SELECT y.key AS key, count(1) AS cnt GROUP BY y.key) tmp; set hive.optimize.correlation=true; -EXPLAIN +set hive.auto.convert.join=false; +EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT y.key AS key, count(1) AS cnt FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER JOIN T3_n34 z ON (y.key = z.key) @@ -157,3 +172,15 @@ SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT y.key AS key, count(1) AS cnt FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER JOIN T3_n34 z ON (y.key = z.key) GROUP BY y.key) tmp; + +set hive.auto.convert.join=true; +EXPLAIN VECTORIZATION +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT y.key AS key, count(1) AS cnt + FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER JOIN T3_n34 z ON (y.key = z.key) + GROUP BY y.key) tmp; + +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT y.key AS key, count(1) AS cnt + FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER JOIN T3_n34 z ON (y.key = z.key) + GROUP BY y.key) tmp; \ No newline at end of file diff --git ql/src/test/queries/clientpositive/create_view.q ql/src/test/queries/clientpositive/create_view.q index dce2866f8d..3fe14f4ecd 100644 --- ql/src/test/queries/clientpositive/create_view.q +++ ql/src/test/queries/clientpositive/create_view.q @@ -237,6 +237,8 @@ select * from view17; create view view18 as select v+1 from (select 1 as v) t; select * from view18; +-- create view if not exists +create view if not exists view18 as
select v+1 from (select 1 as v) t; DROP VIEW view1; DROP VIEW view2; diff --git ql/src/test/queries/clientpositive/fullouter_mapjoin_1_optimized.q ql/src/test/queries/clientpositive/fullouter_mapjoin_1_optimized.q new file mode 100644 index 0000000000..32b2e0495f --- /dev/null +++ ql/src/test/queries/clientpositive/fullouter_mapjoin_1_optimized.q @@ -0,0 +1,290 @@ +set hive.mapred.mode=nonstrict; +set hive.explain.user=false; +set hive.vectorized.execution.enabled=false; +set hive.vectorized.execution.mapjoin.native.enabled=false; +set hive.vectorized.execution.mapjoin.native.fast.hashtable.enabled=false; + +set hive.auto.convert.join=true; +SET hive.auto.convert.join.noconditionaltask=true; +set hive.stats.fetch.column.stats=false; + +------------------------------------------------------------------------------------------ +-- FULL OUTER Vectorized Native MapJoin variation for OPTIMIZED hash table implementation. +------------------------------------------------------------------------------------------ + +-- SORT_QUERY_RESULTS + +------------------------------------------------------------------------------------------ +-- DYNAMIC PARTITION HASH JOIN +------------------------------------------------------------------------------------------ + +set hive.optimize.dynamic.partition.hashjoin=true; + +set hive.mapjoin.hybridgrace.hashtable=false; + +-- NOTE: Use very small sizes here to skip SHARED MEMORY MapJoin and force usage +-- NOTE: of DYNAMIC PARTITION HASH JOIN instead. +set hive.auto.convert.join.noconditionaltask.size=500; +set hive.exec.reducers.bytes.per.reducer=500; + +------------------------------------------------------------------------------------------ +-- Single LONG key +------------------------------------------------------------------------------------------ + +CREATE TABLE fullouter_long_big_1a_txt(key bigint) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_txt; +CREATE TABLE fullouter_long_big_1a STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_txt; + +CREATE TABLE fullouter_long_big_1a_nonull_txt(key bigint) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_nonull_txt; +CREATE TABLE fullouter_long_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_nonull_txt; + +CREATE TABLE fullouter_long_small_1a_txt(key bigint, s_date date) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_txt; +CREATE TABLE fullouter_long_small_1a STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_txt; + +CREATE TABLE fullouter_long_small_1a_nonull_txt(key bigint, s_date date) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_nonull_txt; +CREATE TABLE fullouter_long_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_nonull_txt; + +analyze table fullouter_long_big_1a compute statistics; +analyze table fullouter_long_big_1a compute statistics for columns; +analyze table fullouter_long_big_1a_nonull compute statistics; +analyze table fullouter_long_big_1a_nonull compute statistics for columns; +analyze table fullouter_long_small_1a compute statistics; +analyze table fullouter_long_small_1a compute 
statistics for columns; +analyze table fullouter_long_small_1a_nonull compute statistics; +analyze table fullouter_long_small_1a_nonull compute statistics for columns; + +-- Do first one with FULL OUTER MapJoin NOT Enabled. +SET hive.mapjoin.full.outer=false; +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key; + +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key; + +SET hive.mapjoin.full.outer=true; + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key; + +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key; + +-- Big table without NULL key(s). +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key; + +-- Small table without NULL key(s). +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key +order by b.key; + +-- Both Big and Small tables without NULL key(s). +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key +order by b.key; + + +CREATE TABLE fullouter_long_big_1b(key smallint) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1b.txt' OVERWRITE INTO TABLE fullouter_long_big_1b; + +CREATE TABLE fullouter_long_small_1b(key smallint, s_timestamp timestamp) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1b.txt' OVERWRITE INTO TABLE fullouter_long_small_1b; + +analyze table fullouter_long_big_1b compute statistics; +analyze table fullouter_long_big_1b compute statistics for columns; +analyze table fullouter_long_small_1b compute statistics; +analyze table fullouter_long_small_1b compute statistics for columns; + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key; + +SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key; + + +CREATE TABLE fullouter_long_big_1c(key int, b_string string) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1c.txt' OVERWRITE INTO TABLE fullouter_long_big_1c; + +CREATE TABLE fullouter_long_small_1c(key int, s_decimal decimal(38, 18)) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1c.txt' OVERWRITE INTO TABLE fullouter_long_small_1c; + +analyze table fullouter_long_big_1c compute statistics; +analyze table fullouter_long_big_1c compute statistics for columns; +analyze table fullouter_long_small_1c compute statistics; +analyze table fullouter_long_small_1c compute statistics for columns; + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key; + +SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order 
by b.key; + + +CREATE TABLE fullouter_long_big_1d(key int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1d.txt' OVERWRITE INTO TABLE fullouter_long_big_1d; + +CREATE TABLE fullouter_long_small_1d(key int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1d.txt' OVERWRITE INTO TABLE fullouter_long_small_1d; + +analyze table fullouter_long_big_1d compute statistics; +analyze table fullouter_long_big_1d compute statistics for columns; +analyze table fullouter_long_small_1d compute statistics; +analyze table fullouter_long_small_1d compute statistics for columns; + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key; + +SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key; + + +------------------------------------------------------------------------------------------ +-- MULTI-KEY key +------------------------------------------------------------------------------------------ + +CREATE TABLE fullouter_multikey_big_1a_txt(key0 smallint, key1 int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_txt; +CREATE TABLE fullouter_multikey_big_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_txt; + +CREATE TABLE fullouter_multikey_big_1a_nonull_txt(key0 smallint, key1 int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_nonull_txt; +CREATE TABLE fullouter_multikey_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_nonull_txt; + +CREATE TABLE fullouter_multikey_small_1a_txt(key0 smallint, key1 int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_txt; +CREATE TABLE fullouter_multikey_small_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_txt; + +CREATE TABLE fullouter_multikey_small_1a_nonull_txt(key0 smallint, key1 int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_nonull_txt; +CREATE TABLE fullouter_multikey_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_nonull_txt; + +analyze table fullouter_multikey_big_1a compute statistics; +analyze table fullouter_multikey_big_1a compute statistics for columns; +analyze table fullouter_multikey_big_1a_nonull compute statistics; +analyze table fullouter_multikey_big_1a_nonull compute statistics for columns; +analyze table fullouter_multikey_small_1a compute statistics; +analyze table fullouter_multikey_small_1a compute statistics for columns; +analyze table fullouter_multikey_small_1a_nonull compute statistics; +analyze table fullouter_multikey_small_1a_nonull compute statistics for columns; + + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1; + +SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN 
fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1; + +-- Big table without NULL key(s). +SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1; + +-- Small table without NULL key(s). +SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1; + +-- Both Big and Small tables without NULL key(s). +SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1; + + + + +CREATE TABLE fullouter_multikey_big_1b_txt(key0 timestamp, key1 smallint, key2 string) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1b_txt; +CREATE TABLE fullouter_multikey_big_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1b_txt; + +CREATE TABLE fullouter_multikey_small_1b_txt(key0 timestamp, key1 smallint, key2 string, s_decimal decimal(38, 18)) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1b_txt; +CREATE TABLE fullouter_multikey_small_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1b_txt; + +analyze table fullouter_multikey_big_1b_txt compute statistics; +analyze table fullouter_multikey_big_1b_txt compute statistics for columns; +analyze table fullouter_multikey_small_1b_txt compute statistics; +analyze table fullouter_multikey_small_1b_txt compute statistics for columns; + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2 +order by b.key0, b.key1; + +SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2 +order by b.key0, b.key1; + + +------------------------------------------------------------------------------------------ +-- Single STRING key +------------------------------------------------------------------------------------------ + +CREATE TABLE fullouter_string_big_1a_txt(key string) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_txt; +CREATE TABLE fullouter_string_big_1a STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_txt; + +CREATE TABLE fullouter_string_big_1a_nonull_txt(key string) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_nonull_txt; +CREATE TABLE fullouter_string_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_nonull_txt; + +CREATE TABLE fullouter_string_small_1a_txt(key string, s_date date, s_timestamp timestamp) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_txt; +CREATE TABLE 
fullouter_string_small_1a STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_txt; + +CREATE TABLE fullouter_string_small_1a_nonull_txt(key string, s_date date, s_timestamp timestamp) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_nonull_txt; +CREATE TABLE fullouter_string_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_nonull_txt; + +analyze table fullouter_string_big_1a compute statistics; +analyze table fullouter_string_big_1a compute statistics for columns; +analyze table fullouter_string_big_1a_nonull compute statistics; +analyze table fullouter_string_big_1a_nonull compute statistics for columns; +analyze table fullouter_string_small_1a compute statistics; +analyze table fullouter_string_small_1a compute statistics for columns; +analyze table fullouter_string_small_1a_nonull compute statistics; +analyze table fullouter_string_small_1a_nonull compute statistics for columns; + + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key; + +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key; + +-- Big table without NULL key(s). +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key; + +-- Small table without NULL key(s). +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key +order by b.key; + +-- Both Big and Small tables without NULL key(s). +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key +order by b.key; + + + diff --git ql/src/test/queries/clientpositive/join32_lessSize.q ql/src/test/queries/clientpositive/join32_lessSize.q index fcadbe367a..b998ac77df 100644 --- ql/src/test/queries/clientpositive/join32_lessSize.q +++ ql/src/test/queries/clientpositive/join32_lessSize.q @@ -2,6 +2,7 @@ --! qt:dataset:src1 --! qt:dataset:src set hive.mapred.mode=nonstrict; + -- SORT_QUERY_RESULTS CREATE TABLE dest_j1_n21(key STRING, value STRING, val2 STRING) STORED AS TEXTFILE; diff --git ql/src/test/queries/clientpositive/join33.q ql/src/test/queries/clientpositive/join33.q index 15275755ba..6ddf0eb6b1 100644 --- ql/src/test/queries/clientpositive/join33.q +++ ql/src/test/queries/clientpositive/join33.q @@ -2,6 +2,7 @@ --! qt:dataset:src1 --! qt:dataset:src set hive.mapred.mode=nonstrict; + -- SORT_QUERY_RESULTS CREATE TABLE dest_j1_n7(key STRING, value STRING, val2 STRING) STORED AS TEXTFILE; diff --git ql/src/test/queries/clientpositive/manyViewJoin.q ql/src/test/queries/clientpositive/manyViewJoin.q new file mode 100644 index 0000000000..55dca832e6 --- /dev/null +++ ql/src/test/queries/clientpositive/manyViewJoin.q @@ -0,0 +1,8522 @@ +-- This test creates a scenario where there will be a 47-view join that will create a plan +-- using map join and backup conditional tasks. This scenario is covered in HIVE-20489.
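[Editor's note] The new file below is over 8,500 lines of generated DDL, so the repeated shape is easier to see in miniature. A hypothetical reduction (all names invented, not from the patch): each block creates a raw table, a casting view over it, and a "latest partition" view that inner-joins a max-partition table; dozens of such views are then joined in one query, which is what forces the planner to create map-join backup conditional tasks.

-- Hypothetical miniature of the pattern manyViewJoin.q repeats per table.
CREATE TABLE raw_t (k STRING, ds_ts BIGINT);
CREATE TABLE max_part_t (max_partition BIGINT);

CREATE VIEW cast_v AS
SELECT CAST(k AS INT) AS k, ds_ts FROM raw_t;

CREATE VIEW latest_v AS
SELECT t1.*
FROM cast_v t1
INNER JOIN max_part_t t2 ON t1.ds_ts = t2.max_partition;

-- manyViewJoin.q builds dozens of latest_v-style views and joins them
-- all in one query, exercising the backup-task path described above.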
+ +drop table if exists test_hive_1035 purge; + +create table test_hive_1035 +( + test_hive_1018 string + ,test_hive_1004 string + ,test_hive_1025 string + ,test_hive_23 string + ,test_hive_27 string + ,test_hive_29 string + ,test_hive_30 string + ,test_hive_97 string + ,test_hive_96 string + ,test_hive_98 string + ,test_hive_101 string + ,test_hive_102 string + ,test_hive_109 string + ,test_hive_111 string + ,test_hive_112 string + ,test_hive_113 string + ,test_hive_114 string + ,test_hive_115 string + ,test_hive_78 string + ,test_hive_79 string + ,test_hive_24 string + ,test_hive_26 string + ,test_hive_110 string + ,test_hive_77 string + ,test_hive_87 string + ,test_hive_92 string + ,test_hive_90 string + ,test_hive_74 string + ,test_hive_85 string + ,test_hive_81 string + ,test_hive_82 string + ,test_hive_106 string + ,test_hive_107 string + ,test_hive_108 string + ,test_hive_75 string + ,test_hive_86 string + ,test_hive_76 string + ,test_hive_89 string + ,test_hive_88 string + ,test_hive_91 string + ,test_hive_71 string + ,test_hive_72 string + ,test_hive_73 string + ,test_hive_80 string + ,test_hive_103 string + ,test_hive_104 string + ,test_hive_1002 string + ,test_hive_1003 string + ,test_hive_25 string + ,test_hive_28 string + ,test_hive_93 string + ,test_hive_94 string + ,test_hive_95 string + ,test_hive_99 string + ,test_hive_105 string + ,test_hive_83 string + ,test_hive_84 string + ,test_hive_100 string + ,test_hive_1023 string + ,test_hive_1024 string + ,test_hive_1010 string + ,test_hive_1010_a_d string + ,test_hive_1010_a_g string + ,test_hive_1026 string + ,test_hive_1000 string + ,test_hive_1001 string + ,test_hive_1030 string + ,test_hive_1030_1 string + ,test_hive_1030_2 string + ,test_hive_1030_3 string + ,test_hive_1021 string + ,test_hive_1020 string + ,test_hive_1022 string + ,test_hive_1019 string + ,test_hive_1027 string + ,test_hive_1028 string + ,test_hive_1029 string + ,test_hive_1005 string + ,test_hive_1005_a_d string + ,test_hive_1005_psr string + ,test_hive_1005_psr_a_d string + ,test_hive_1005_psr_e string + ,test_hive_1013 string + ,test_hive_1013_a_d string + ,test_hive_1013_psr string + ,test_hive_1013_psr_a_d string + ,test_hive_1013_psr_e string + ,test_hive_1034 string +) +partitioned by (ds int, ts int) +stored as parquet; + +create table if not exists test_hive_1038 +( + test_hive_1018 string + ,test_hive_1004 string + ,test_hive_1025 string + ,test_hive_23 string + ,test_hive_27 string + ,test_hive_29 string + ,test_hive_30 string + ,test_hive_97 string + ,test_hive_96 string + ,test_hive_98 string + ,test_hive_101 string + ,test_hive_102 string + ,test_hive_109 string + ,test_hive_111 string + ,test_hive_112 string + ,test_hive_113 string + ,test_hive_114 string + ,test_hive_115 string + ,test_hive_78 string + ,test_hive_79 string + ,test_hive_24 string + ,test_hive_26 string + ,test_hive_110 string + ,test_hive_77 string + ,test_hive_87 string + ,test_hive_92 string + ,test_hive_90 string + ,test_hive_74 string + ,test_hive_85 string + ,test_hive_81 string + ,test_hive_82 string + ,test_hive_106 string + ,test_hive_107 string + ,test_hive_108 string + ,test_hive_75 string + ,test_hive_86 string + ,test_hive_76 string + ,test_hive_89 string + ,test_hive_88 string + ,test_hive_91 string + ,test_hive_71 string + ,test_hive_72 string + ,test_hive_73 string + ,test_hive_80 string + ,test_hive_103 string + ,test_hive_104 string + ,test_hive_1002 string + ,test_hive_1003 string + ,test_hive_25 string + ,test_hive_28 string + ,test_hive_93 string + 
,test_hive_94 string + ,test_hive_95 string + ,test_hive_99 string + ,test_hive_105 string + ,test_hive_83 string + ,test_hive_84 string + ,test_hive_100 string + ,test_hive_1023 string + ,test_hive_1024 string + ,test_hive_1010 string + ,test_hive_1010_a_d string + ,test_hive_1010_a_g string + ,test_hive_1026 string + ,test_hive_1000 string + ,test_hive_1001 string + ,test_hive_1030 string + ,test_hive_1030_1 string + ,test_hive_1030_2 string + ,test_hive_1030_3 string + ,test_hive_1021 string + ,test_hive_1020 string + ,test_hive_1022 string + ,test_hive_1019 string + ,test_hive_1027 string + ,test_hive_1028 string + ,test_hive_1029 string + ,test_hive_1005 string + ,test_hive_1005_a_d string + ,test_hive_1005_psr string + ,test_hive_1005_psr_a_d string + ,test_hive_1005_psr_e string + ,test_hive_1013 string + ,test_hive_1013_a_d string + ,test_hive_1013_psr string + ,test_hive_1013_psr_a_d string + ,test_hive_1013_psr_e string + ,test_hive_1034 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1037 purge; + +create table if not exists test_hive_1037 +( +max_partition bigint +); + +drop view if exists test_hive_1040; + +create view if not exists test_hive_1040 +as +select + cast(test_hive_1018 as int) as test_hive_1018 + ,cast(test_hive_1004 as int) as test_hive_1004 + ,cast(test_hive_1025 as int) as test_hive_1025 + ,cast(test_hive_23 as string) as test_hive_23 + ,cast(test_hive_27 as string) as test_hive_27 + ,cast(test_hive_29 as string) as test_hive_29 + ,cast(test_hive_30 as string) as test_hive_30 + ,cast(test_hive_97 as string) as test_hive_97 + ,cast(test_hive_96 as string) as test_hive_96 + ,cast(test_hive_98 as string) as test_hive_98 + ,cast(test_hive_101 as string) as test_hive_101 + ,cast(test_hive_102 as string) as test_hive_102 + ,cast(test_hive_109 as string) as test_hive_109 + ,cast(test_hive_111 as string) as test_hive_111 + ,cast(test_hive_112 as string) as test_hive_112 + ,cast(test_hive_113 as string) as test_hive_113 + ,cast(test_hive_114 as string) as test_hive_114 + ,cast(test_hive_115 as string) as test_hive_115 + ,cast(test_hive_78 as string) as test_hive_78 + ,cast(test_hive_79 as string) as test_hive_79 + ,cast(test_hive_24 as string) as test_hive_24 + ,cast(test_hive_26 as string) as test_hive_26 + ,cast(test_hive_110 as string) as test_hive_110 + ,cast(test_hive_77 as string) as test_hive_77 + ,cast(test_hive_87 as string) as test_hive_87 + ,cast(test_hive_92 as string) as test_hive_92 + ,cast(test_hive_90 as string) as test_hive_90 + ,cast(test_hive_74 as string) as test_hive_74 + ,cast(test_hive_85 as string) as test_hive_85 + ,cast(test_hive_81 as string) as test_hive_81 + ,cast(test_hive_82 as string) as test_hive_82 + ,cast(test_hive_106 as string) as test_hive_106 + ,cast(test_hive_107 as string) as test_hive_107 + ,cast(test_hive_108 as string) as test_hive_108 + ,cast(test_hive_75 as string) as test_hive_75 + ,cast(test_hive_86 as string) as test_hive_86 + ,cast(test_hive_76 as string) as test_hive_76 + ,cast(test_hive_89 as string) as test_hive_89 + ,cast(test_hive_88 as string) as test_hive_88 + ,cast(test_hive_91 as string) as test_hive_91 + ,cast(test_hive_71 as string) as test_hive_71 + ,cast(test_hive_72 as string) as test_hive_72 + ,cast(test_hive_73 as string) as test_hive_73 + ,cast(test_hive_80 as string) as test_hive_80 + ,cast(test_hive_103 as string) as test_hive_103 + ,cast(test_hive_104 as string) as test_hive_104 + 
,cast(test_hive_1002 as string) as test_hive_1002 + ,cast(test_hive_1003 as string) as test_hive_1003 + ,cast(from_unixtime(unix_timestamp(test_hive_25,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_25 + ,cast(test_hive_28 as string) as test_hive_28 + ,cast(test_hive_93 as string) as test_hive_93 + ,cast(test_hive_94 as string) as test_hive_94 + ,cast(test_hive_95 as string) as test_hive_95 + ,cast(test_hive_99 as string) as test_hive_99 + ,cast(test_hive_105 as string) as test_hive_105 + ,cast(test_hive_83 as string) as test_hive_83 + ,cast(test_hive_84 as string) as test_hive_84 + ,cast(test_hive_100 as string) as test_hive_100 + ,cast(test_hive_1023 as int) as test_hive_1023 + ,cast(test_hive_1024 as int) as test_hive_1024 + ,cast(test_hive_1010 as int) as test_hive_1010 + ,cast(test_hive_1010_a_d as int) as test_hive_1010_a_d + ,cast(test_hive_1010_a_g as int) as test_hive_1010_a_g + ,cast(test_hive_1026 as double) as test_hive_1026 + ,cast(test_hive_1000 as double) as test_hive_1000 + ,cast(test_hive_1001 as double) as test_hive_1001 + ,cast(test_hive_1030 as int) as test_hive_1030 + ,cast(test_hive_1030_1 as int) as test_hive_1030_1 + ,cast(test_hive_1030_2 as int) as test_hive_1030_2 + ,cast(test_hive_1030_3 as int) as test_hive_1030_3 + ,cast(test_hive_1021 as double) as test_hive_1021 + ,cast(test_hive_1020 as double) as test_hive_1020 + ,cast(test_hive_1022 as int) as test_hive_1022 + ,cast(test_hive_1019 as int) as test_hive_1019 + ,cast(test_hive_1027 as double) as test_hive_1027 + ,cast(test_hive_1028 as double) as test_hive_1028 + ,cast(test_hive_1029 as double) as test_hive_1029 + ,cast(test_hive_1005 as int) as test_hive_1005 + ,cast(test_hive_1005_a_d as int) as test_hive_1005_a_d + ,cast(test_hive_1005_psr as int) as test_hive_1005_psr + ,cast(test_hive_1005_psr_a_d as int) as test_hive_1005_psr_a_d + ,cast(test_hive_1005_psr_e as int) as test_hive_1005_psr_e + ,cast(test_hive_1013 as int) as test_hive_1013 + ,cast(test_hive_1013_a_d as int) as test_hive_1013_a_d + ,cast(test_hive_1013_psr as int) as test_hive_1013_psr + ,cast(test_hive_1013_psr_a_d as int) as test_hive_1013_psr_a_d + ,cast(test_hive_1013_psr_e as int) as test_hive_1013_psr_e + ,cast(from_unixtime(unix_timestamp(test_hive_1034,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1034 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1038 +; + +drop view if exists test_hive_1039; + +create view test_hive_1039 +as +select + test_hive_1018 as test_hive_1018 + ,test_hive_1004 as test_hive_1004 + ,test_hive_1025 as test_hive_1025 + ,test_hive_23 as test_hive_23 + ,test_hive_27 as test_hive_27 + ,test_hive_29 as test_hive_29 + ,test_hive_30 as test_hive_30 + ,test_hive_97 as test_hive_97 + ,test_hive_96 as test_hive_96 + ,test_hive_98 as test_hive_98 + ,test_hive_101 as test_hive_101 + ,test_hive_102 as test_hive_102 + ,test_hive_109 as test_hive_109 + ,test_hive_111 as test_hive_111 + ,test_hive_112 as test_hive_112 + ,test_hive_113 as test_hive_113 + ,test_hive_114 as test_hive_114 + ,test_hive_115 as test_hive_115 + ,test_hive_78 as test_hive_78 + ,test_hive_79 as test_hive_79 + ,test_hive_24 as test_hive_24 + ,test_hive_26 as test_hive_26 + ,test_hive_110 as test_hive_110 + ,test_hive_77 as test_hive_77 + ,test_hive_87 as test_hive_87 + ,test_hive_92 as test_hive_92 + ,test_hive_90 as test_hive_90 + ,test_hive_74 as test_hive_74 + ,test_hive_85 as test_hive_85 + ,test_hive_81 as 
test_hive_81 + ,test_hive_82 as test_hive_82 + ,test_hive_106 as test_hive_106 + ,test_hive_107 as test_hive_107 + ,test_hive_108 as test_hive_108 + ,test_hive_75 as test_hive_75 + ,test_hive_86 as test_hive_86 + ,test_hive_76 as test_hive_76 + ,test_hive_89 as test_hive_89 + ,test_hive_88 as test_hive_88 + ,test_hive_91 as test_hive_91 + ,test_hive_71 as test_hive_71 + ,test_hive_72 as test_hive_72 + ,test_hive_73 as test_hive_73 + ,test_hive_80 as test_hive_80 + ,test_hive_103 as test_hive_103 + ,test_hive_104 as test_hive_104 + ,test_hive_1002 as test_hive_1002 + ,test_hive_1003 as test_hive_1003 + ,test_hive_25 as test_hive_25 + ,test_hive_28 as test_hive_28 + ,test_hive_93 as test_hive_93 + ,test_hive_94 as test_hive_94 + ,test_hive_95 as test_hive_95 + ,test_hive_99 as test_hive_99 + ,test_hive_105 as test_hive_105 + ,test_hive_83 as test_hive_83 + ,test_hive_84 as test_hive_84 + ,test_hive_100 as test_hive_100 + ,test_hive_1023 as test_hive_1023 + ,test_hive_1024 as test_hive_1024 + ,test_hive_1010 as test_hive_1010 + ,test_hive_1010_a_d as test_hive_1010_a_d + ,test_hive_1010_a_g as test_hive_1010_a_g + ,test_hive_1026 as test_hive_1026 + ,test_hive_1000 as test_hive_1000 + ,test_hive_1001 as test_hive_1001 + ,test_hive_1030 as test_hive_1030 + ,test_hive_1030_1 as test_hive_1030_1 + ,test_hive_1030_2 as test_hive_1030_2 + ,test_hive_1030_3 as test_hive_1030_3 + ,test_hive_1021 as test_hive_1021 + ,test_hive_1020 as test_hive_1020 + ,test_hive_1022 as test_hive_1022 + ,test_hive_1019 as test_hive_1019 + ,test_hive_1027 as test_hive_1027 + ,test_hive_1028 as test_hive_1028 + ,test_hive_1029 as test_hive_1029 + ,test_hive_1005 as test_hive_1005 + ,test_hive_1005_a_d as test_hive_1005_a_d + ,test_hive_1005_psr as test_hive_1005_psr + ,test_hive_1005_psr_a_d as test_hive_1005_psr_a_d + ,test_hive_1005_psr_e as test_hive_1005_psr_e + ,test_hive_1013 as test_hive_1013 + ,test_hive_1013_a_d as test_hive_1013_a_d + ,test_hive_1013_psr as test_hive_1013_psr + ,test_hive_1013_psr_a_d as test_hive_1013_psr_a_d + ,test_hive_1013_psr_e as test_hive_1013_psr_e + ,test_hive_1034 as test_hive_1034 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1040 t1 +; + +drop view if exists test_hive_1036; + +create view test_hive_1036 +as +select t1.* +from test_hive_1039 t1 +inner join test_hive_1037 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1054 purge; + +create table test_hive_1054 +( + test_hive_1047 string + ,test_hive_1045 string + ,test_hive_1048 string + ,test_hive_132 string + ,test_hive_146 string + ,test_hive_1043 string + ,test_hive_149 string + ,test_hive_150 string + ,test_hive_119 string + ,test_hive_118 string + ,test_hive_120 string + ,test_hive_151 string + ,test_hive_116 string + ,test_hive_117 string + ,test_hive_121 string + ,test_hive_122 string + ,test_hive_152 string + ,test_hive_155 string + ,test_hive_159 string + ,test_hive_131 string + ,test_hive_140 string + ,test_hive_145 string + ,test_hive_143 string + ,test_hive_128 string + ,test_hive_138 string + ,test_hive_134 string + ,test_hive_135 string + ,test_hive_156 string + ,test_hive_157 string + ,test_hive_158 string + ,test_hive_129 string + ,test_hive_139 string + ,test_hive_130 string + ,test_hive_142 string + ,test_hive_141 string + ,test_hive_144 string + ,test_hive_125 string + ,test_hive_126 string + ,test_hive_127 string + ,test_hive_133 string + ,test_hive_154 string + ,test_hive_123 string + ,test_hive_160 string + ,test_hive_136 string + ,test_hive_137 string + 
,test_hive_124 string + ,test_hive_153 string + ,test_hive_148 string + ,test_hive_147 string + ,test_hive_1052 string + ,test_hive_1051 string + ,test_hive_1041 string + ,test_hive_1042 string + ,test_hive_1044 string + ,test_hive_1046 string + ,test_hive_1050 string + ,test_hive_1049 string + ,test_hive_1053 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1057 +( + test_hive_1047 string + ,test_hive_1045 string + ,test_hive_1048 string + ,test_hive_132 string + ,test_hive_146 string + ,test_hive_1043 string + ,test_hive_149 string + ,test_hive_150 string + ,test_hive_119 string + ,test_hive_118 string + ,test_hive_120 string + ,test_hive_151 string + ,test_hive_116 string + ,test_hive_117 string + ,test_hive_121 string + ,test_hive_122 string + ,test_hive_152 string + ,test_hive_155 string + ,test_hive_159 string + ,test_hive_131 string + ,test_hive_140 string + ,test_hive_145 string + ,test_hive_143 string + ,test_hive_128 string + ,test_hive_138 string + ,test_hive_134 string + ,test_hive_135 string + ,test_hive_156 string + ,test_hive_157 string + ,test_hive_158 string + ,test_hive_129 string + ,test_hive_139 string + ,test_hive_130 string + ,test_hive_142 string + ,test_hive_141 string + ,test_hive_144 string + ,test_hive_125 string + ,test_hive_126 string + ,test_hive_127 string + ,test_hive_133 string + ,test_hive_154 string + ,test_hive_123 string + ,test_hive_160 string + ,test_hive_136 string + ,test_hive_137 string + ,test_hive_124 string + ,test_hive_153 string + ,test_hive_148 string + ,test_hive_147 string + ,test_hive_1052 string + ,test_hive_1051 string + ,test_hive_1041 string + ,test_hive_1042 string + ,test_hive_1044 string + ,test_hive_1046 string + ,test_hive_1050 string + ,test_hive_1049 string + ,test_hive_1053 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1056 purge; + +create table if not exists test_hive_1056 +( +max_partition bigint +); + +drop view if exists test_hive_1059; + +create view if not exists test_hive_1059 +as +select + cast(test_hive_1047 as int) as test_hive_1047 + ,cast(test_hive_1045 as int) as test_hive_1045 + ,cast(test_hive_1048 as int) as test_hive_1048 + ,cast(test_hive_132 as string) as test_hive_132 + ,cast(test_hive_146 as string) as test_hive_146 + ,cast(test_hive_1043 as string) as test_hive_1043 + ,cast(test_hive_149 as string) as test_hive_149 + ,cast(test_hive_150 as string) as test_hive_150 + ,cast(from_unixtime(unix_timestamp(test_hive_119,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_119 + ,cast(test_hive_118 as string) as test_hive_118 + ,cast(test_hive_120 as string) as test_hive_120 + ,cast(test_hive_151 as string) as test_hive_151 + ,cast(test_hive_116 as string) as test_hive_116 + ,cast(test_hive_117 as string) as test_hive_117 + ,cast(test_hive_121 as string) as test_hive_121 + ,cast(test_hive_122 as string) as test_hive_122 + ,cast(test_hive_152 as string) as test_hive_152 + ,cast(test_hive_155 as string) as test_hive_155 + ,cast(test_hive_159 as string) as test_hive_159 + ,cast(test_hive_131 as string) as test_hive_131 + ,cast(test_hive_140 as string) as test_hive_140 + ,cast(test_hive_145 as string) as test_hive_145 + ,cast(test_hive_143 as string) as test_hive_143 + ,cast(test_hive_128 as string) as test_hive_128 + ,cast(test_hive_138 as string) as test_hive_138 
+ ,cast(test_hive_134 as string) as test_hive_134 + ,cast(test_hive_135 as string) as test_hive_135 + ,cast(test_hive_156 as string) as test_hive_156 + ,cast(test_hive_157 as string) as test_hive_157 + ,cast(test_hive_158 as string) as test_hive_158 + ,cast(test_hive_129 as string) as test_hive_129 + ,cast(test_hive_139 as string) as test_hive_139 + ,cast(test_hive_130 as string) as test_hive_130 + ,cast(test_hive_142 as string) as test_hive_142 + ,cast(test_hive_141 as string) as test_hive_141 + ,cast(test_hive_144 as string) as test_hive_144 + ,cast(test_hive_125 as string) as test_hive_125 + ,cast(test_hive_126 as string) as test_hive_126 + ,cast(test_hive_127 as string) as test_hive_127 + ,cast(test_hive_133 as string) as test_hive_133 + ,cast(test_hive_154 as string) as test_hive_154 + ,cast(test_hive_123 as string) as test_hive_123 + ,cast(test_hive_160 as string) as test_hive_160 + ,cast(test_hive_136 as string) as test_hive_136 + ,cast(test_hive_137 as string) as test_hive_137 + ,cast(test_hive_124 as string) as test_hive_124 + ,cast(test_hive_153 as string) as test_hive_153 + ,cast(test_hive_148 as string) as test_hive_148 + ,cast(test_hive_147 as string) as test_hive_147 + ,cast(test_hive_1052 as int) as test_hive_1052 + ,cast(test_hive_1051 as int) as test_hive_1051 + ,cast(test_hive_1041 as int) as test_hive_1041 + ,cast(test_hive_1042 as int) as test_hive_1042 + ,cast(test_hive_1044 as int) as test_hive_1044 + ,cast(test_hive_1046 as int) as test_hive_1046 + ,cast(test_hive_1050 as int) as test_hive_1050 + ,cast(test_hive_1049 as int) as test_hive_1049 + ,cast(from_unixtime(unix_timestamp(test_hive_1053,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1053 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1057 +; + +drop view if exists test_hive_1058; + +create view test_hive_1058 +as +select + test_hive_1047 as test_hive_1047 + ,test_hive_1045 as test_hive_1045 + ,test_hive_1048 as test_hive_1048 + ,test_hive_132 as test_hive_132 + ,test_hive_146 as test_hive_146 + ,test_hive_1043 as test_hive_1043 + ,test_hive_149 as test_hive_149 + ,test_hive_150 as test_hive_150 + ,test_hive_119 as test_hive_119 + ,test_hive_118 as test_hive_118 + ,test_hive_120 as test_hive_120 + ,test_hive_151 as test_hive_151 + ,test_hive_116 as test_hive_116 + ,test_hive_117 as test_hive_117 + ,test_hive_121 as test_hive_121 + ,test_hive_122 as test_hive_122 + ,test_hive_152 as test_hive_152 + ,test_hive_155 as test_hive_155 + ,test_hive_159 as test_hive_159 + ,test_hive_131 as test_hive_131 + ,test_hive_140 as test_hive_140 + ,test_hive_145 as test_hive_145 + ,test_hive_143 as test_hive_143 + ,test_hive_128 as test_hive_128 + ,test_hive_138 as test_hive_138 + ,test_hive_134 as test_hive_134 + ,test_hive_135 as test_hive_135 + ,test_hive_156 as test_hive_156 + ,test_hive_157 as test_hive_157 + ,test_hive_158 as test_hive_158 + ,test_hive_129 as test_hive_129 + ,test_hive_139 as test_hive_139 + ,test_hive_130 as test_hive_130 + ,test_hive_142 as test_hive_142 + ,test_hive_141 as test_hive_141 + ,test_hive_144 as test_hive_144 + ,test_hive_125 as test_hive_125 + ,test_hive_126 as test_hive_126 + ,test_hive_127 as test_hive_127 + ,test_hive_133 as test_hive_133 + ,test_hive_154 as test_hive_154 + ,test_hive_123 as test_hive_123 + ,test_hive_160 as test_hive_160 + ,test_hive_136 as test_hive_136 + ,test_hive_137 as test_hive_137 + ,test_hive_124 as test_hive_124 + ,test_hive_153 as 
test_hive_153 + ,test_hive_148 as test_hive_148 + ,test_hive_147 as test_hive_147 + ,test_hive_1052 as test_hive_1052 + ,test_hive_1051 as test_hive_1051 + ,test_hive_1041 as test_hive_1041 + ,test_hive_1042 as test_hive_1042 + ,test_hive_1044 as test_hive_1044 + ,test_hive_1046 as test_hive_1046 + ,test_hive_1050 as test_hive_1050 + ,test_hive_1049 as test_hive_1049 + ,test_hive_1053 as test_hive_1053 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1059 t1 +; + +drop view if exists test_hive_1055; + +create view test_hive_1055 +as +select t1.* +from test_hive_1058 t1 +inner join test_hive_1056 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1083 purge; + +create table test_hive_1083 +( + test_hive_1072 string + ,test_hive_1065 string + ,test_hive_1073 string + ,test_hive_161 string + ,test_hive_162 string + ,test_hive_163 string + ,test_hive_164 string + ,test_hive_167 string + ,test_hive_168 string + ,test_hive_170 string + ,test_hive_197 string + ,test_hive_198 string + ,test_hive_200 string + ,test_hive_201 string + ,test_hive_202 string + ,test_hive_203 string + ,test_hive_205 string + ,test_hive_206 string + ,test_hive_212 string + ,test_hive_213 string + ,test_hive_178 string + ,test_hive_1060 string + ,test_hive_1061 string + ,test_hive_10612 string + ,test_hive_1063 string + ,test_hive_1064 string + ,test_hive_165 string + ,test_hive_166 string + ,test_hive_169 string + ,test_hive_193 string + ,test_hive_194 string + ,test_hive_195 string + ,test_hive_196 string + ,test_hive_204 string + ,test_hive_207 string + ,test_hive_208 string + ,test_hive_209 string + ,test_hive_210 string + ,test_hive_211 string + ,test_hive_171 string + ,test_hive_172 string + ,test_hive_173 string + ,test_hive_174 string + ,test_hive_175 string + ,test_hive_176 string + ,test_hive_177 string + ,test_hive_179 string + ,test_hive_180 string + ,test_hive_181 string + ,test_hive_182 string + ,test_hive_183 string + ,test_hive_184 string + ,test_hive_185 string + ,test_hive_186 string + ,test_hive_187 string + ,test_hive_188 string + ,test_hive_189 string + ,test_hive_190 string + ,test_hive_191 string + ,test_hive_192 string + ,test_hive_1067 string + ,test_hive_1067_a_g string + ,test_hive_1067_h string + ,test_hive_1066 string + ,test_hive_1070 string + ,test_hive_1070_a_d string + ,test_hive_1074 string + ,test_hive_1074_bp string + ,test_hive_1074_cont string + ,test_hive_1074_lag string + ,test_hive_1078 string + ,test_hive_1078_bp string + ,test_hive_1078_cont string + ,test_hive_1078_lag string + ,test_hive_199 string + ,test_hive_1082 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1086 +( + test_hive_1072 string + ,test_hive_1065 string + ,test_hive_1073 string + ,test_hive_161 string + ,test_hive_162 string + ,test_hive_163 string + ,test_hive_164 string + ,test_hive_167 string + ,test_hive_168 string + ,test_hive_170 string + ,test_hive_197 string + ,test_hive_198 string + ,test_hive_200 string + ,test_hive_201 string + ,test_hive_202 string + ,test_hive_203 string + ,test_hive_205 string + ,test_hive_206 string + ,test_hive_212 string + ,test_hive_213 string + ,test_hive_178 string + ,test_hive_1060 string + ,test_hive_1061 string + ,test_hive_10612 string + ,test_hive_1063 string + ,test_hive_1064 string + ,test_hive_165 string + ,test_hive_166 string + ,test_hive_169 string + ,test_hive_193 string + ,test_hive_194 
string + ,test_hive_195 string + ,test_hive_196 string + ,test_hive_204 string + ,test_hive_207 string + ,test_hive_208 string + ,test_hive_209 string + ,test_hive_210 string + ,test_hive_211 string + ,test_hive_171 string + ,test_hive_172 string + ,test_hive_173 string + ,test_hive_174 string + ,test_hive_175 string + ,test_hive_176 string + ,test_hive_177 string + ,test_hive_179 string + ,test_hive_180 string + ,test_hive_181 string + ,test_hive_182 string + ,test_hive_183 string + ,test_hive_184 string + ,test_hive_185 string + ,test_hive_186 string + ,test_hive_187 string + ,test_hive_188 string + ,test_hive_189 string + ,test_hive_190 string + ,test_hive_191 string + ,test_hive_192 string + ,test_hive_1067 string + ,test_hive_1067_a_g string + ,test_hive_1067_h string + ,test_hive_1066 string + ,test_hive_1070 string + ,test_hive_1070_a_d string + ,test_hive_1074 string + ,test_hive_1074_bp string + ,test_hive_1074_cont string + ,test_hive_1074_lag string + ,test_hive_1078 string + ,test_hive_1078_bp string + ,test_hive_1078_cont string + ,test_hive_1078_lag string + ,test_hive_199 string + ,test_hive_1082 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1085 purge; + +create table if not exists test_hive_1085 +( +max_partition bigint +); + +drop view if exists test_hive_1088; + +create view if not exists test_hive_1088 +as +select + cast(test_hive_1072 as int) as test_hive_1072 + ,cast(test_hive_1065 as int) as test_hive_1065 + ,cast(test_hive_1073 as int) as test_hive_1073 + ,cast(test_hive_161 as string) as test_hive_161 + ,cast(test_hive_162 as string) as test_hive_162 + ,cast(test_hive_163 as string) as test_hive_163 + ,cast(test_hive_164 as string) as test_hive_164 + ,cast(test_hive_167 as string) as test_hive_167 + ,cast(test_hive_168 as string) as test_hive_168 + ,cast(test_hive_170 as string) as test_hive_170 + ,cast(test_hive_197 as string) as test_hive_197 + ,cast(test_hive_198 as string) as test_hive_198 + ,cast(test_hive_200 as string) as test_hive_200 + ,cast(test_hive_201 as string) as test_hive_201 + ,cast(test_hive_202 as string) as test_hive_202 + ,cast(test_hive_203 as string) as test_hive_203 + ,cast(test_hive_205 as string) as test_hive_205 + ,cast(test_hive_206 as string) as test_hive_206 + ,cast(test_hive_212 as string) as test_hive_212 + ,cast(test_hive_213 as string) as test_hive_213 + ,cast(test_hive_178 as string) as test_hive_178 + ,cast(from_unixtime(unix_timestamp(test_hive_1060,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1060 + ,cast(test_hive_1061 as string) as test_hive_1061 + ,cast(test_hive_10612 as string) as test_hive_10612 + ,cast(test_hive_1063 as string) as test_hive_1063 + ,cast(test_hive_1064 as string) as test_hive_1064 + ,cast(test_hive_165 as string) as test_hive_165 + ,cast(test_hive_166 as string) as test_hive_166 + ,cast(test_hive_169 as string) as test_hive_169 + ,cast(test_hive_193 as string) as test_hive_193 + ,cast(test_hive_194 as string) as test_hive_194 + ,cast(test_hive_195 as string) as test_hive_195 + ,cast(test_hive_196 as string) as test_hive_196 + ,cast(test_hive_204 as string) as test_hive_204 + ,cast(test_hive_207 as string) as test_hive_207 + ,cast(test_hive_208 as string) as test_hive_208 + ,cast(test_hive_209 as string) as test_hive_209 + ,cast(test_hive_210 as string) as test_hive_210 + ,cast(test_hive_211 as string) as test_hive_211 + ,cast(test_hive_171 as string) as test_hive_171 + 
,cast(test_hive_172 as string) as test_hive_172 + ,cast(test_hive_173 as string) as test_hive_173 + ,cast(test_hive_174 as string) as test_hive_174 + ,cast(test_hive_175 as string) as test_hive_175 + ,cast(test_hive_176 as string) as test_hive_176 + ,cast(test_hive_177 as string) as test_hive_177 + ,cast(test_hive_179 as string) as test_hive_179 + ,cast(test_hive_180 as string) as test_hive_180 + ,cast(test_hive_181 as string) as test_hive_181 + ,cast(test_hive_182 as string) as test_hive_182 + ,cast(test_hive_183 as string) as test_hive_183 + ,cast(test_hive_184 as string) as test_hive_184 + ,cast(test_hive_185 as string) as test_hive_185 + ,cast(test_hive_186 as string) as test_hive_186 + ,cast(test_hive_187 as string) as test_hive_187 + ,cast(test_hive_188 as string) as test_hive_188 + ,cast(test_hive_189 as string) as test_hive_189 + ,cast(test_hive_190 as string) as test_hive_190 + ,cast(test_hive_191 as string) as test_hive_191 + ,cast(test_hive_192 as string) as test_hive_192 + ,cast(test_hive_1067 as int) as test_hive_1067 + ,cast(test_hive_1067_a_g as int) as test_hive_1067_a_g + ,cast(test_hive_1067_h as int) as test_hive_1067_h + ,cast(test_hive_1066 as int) as test_hive_1066 + ,cast(test_hive_1070 as int) as test_hive_1070 + ,cast(test_hive_1070_a_d as int) as test_hive_1070_a_d + ,cast(test_hive_1074 as int) as test_hive_1074 + ,cast(test_hive_1074_bp as int) as test_hive_1074_bp + ,cast(test_hive_1074_cont as int) as test_hive_1074_cont + ,cast(test_hive_1074_lag as int) as test_hive_1074_lag + ,cast(test_hive_1078 as int) as test_hive_1078 + ,cast(test_hive_1078_bp as int) as test_hive_1078_bp + ,cast(test_hive_1078_cont as int) as test_hive_1078_cont + ,cast(test_hive_1078_lag as int) as test_hive_1078_lag + ,cast(test_hive_199 as string) as test_hive_199 + ,cast(from_unixtime(unix_timestamp(test_hive_1082,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1082 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1086 +; + +drop view if exists test_hive_1087; + +create view test_hive_1087 +as +select + test_hive_1072 as test_hive_1072 + ,test_hive_1065 as test_hive_1065 + ,test_hive_1073 as test_hive_1073 + ,test_hive_161 as test_hive_161 + ,test_hive_162 as test_hive_162 + ,test_hive_163 as test_hive_163 + ,test_hive_164 as test_hive_164 + ,test_hive_167 as test_hive_167 + ,test_hive_168 as test_hive_168 + ,test_hive_170 as test_hive_170 + ,test_hive_197 as test_hive_197 + ,test_hive_198 as test_hive_198 + ,test_hive_200 as test_hive_200 + ,test_hive_201 as test_hive_201 + ,test_hive_202 as test_hive_202 + ,test_hive_203 as test_hive_203 + ,test_hive_205 as test_hive_205 + ,test_hive_206 as test_hive_206 + ,test_hive_212 as test_hive_212 + ,test_hive_213 as test_hive_213 + ,test_hive_178 as test_hive_178 + ,test_hive_1060 as test_hive_1060 + ,test_hive_1061 as test_hive_1061 + ,test_hive_10612 as test_hive_10612 + ,test_hive_1063 as test_hive_1063 + ,test_hive_1064 as test_hive_1064 + ,test_hive_165 as test_hive_165 + ,test_hive_166 as test_hive_166 + ,test_hive_169 as test_hive_169 + ,test_hive_193 as test_hive_193 + ,test_hive_194 as test_hive_194 + ,test_hive_195 as test_hive_195 + ,test_hive_196 as test_hive_196 + ,test_hive_204 as test_hive_204 + ,test_hive_207 as test_hive_207 + ,test_hive_208 as test_hive_208 + ,test_hive_209 as test_hive_209 + ,test_hive_210 as test_hive_210 + ,test_hive_211 as test_hive_211 + ,test_hive_171 as test_hive_171 + 
,test_hive_172 as test_hive_172 + ,test_hive_173 as test_hive_173 + ,test_hive_174 as test_hive_174 + ,test_hive_175 as test_hive_175 + ,test_hive_176 as test_hive_176 + ,test_hive_177 as test_hive_177 + ,test_hive_179 as test_hive_179 + ,test_hive_180 as test_hive_180 + ,test_hive_181 as test_hive_181 + ,test_hive_182 as test_hive_182 + ,test_hive_183 as test_hive_183 + ,test_hive_184 as test_hive_184 + ,test_hive_185 as test_hive_185 + ,test_hive_186 as test_hive_186 + ,test_hive_187 as test_hive_187 + ,test_hive_188 as test_hive_188 + ,test_hive_189 as test_hive_189 + ,test_hive_190 as test_hive_190 + ,test_hive_191 as test_hive_191 + ,test_hive_192 as test_hive_192 + ,test_hive_1067 as test_hive_1067 + ,test_hive_1067_a_g as test_hive_1067_a_g + ,test_hive_1067_h as test_hive_1067_h + ,test_hive_1066 as test_hive_1066 + ,test_hive_1070 as test_hive_1070 + ,test_hive_1070_a_d as test_hive_1070_a_d + ,test_hive_1074 as test_hive_1074 + ,test_hive_1074_bp as test_hive_1074_bp + ,test_hive_1074_cont as test_hive_1074_cont + ,test_hive_1074_lag as test_hive_1074_lag + ,test_hive_1078 as test_hive_1078 + ,test_hive_1078_bp as test_hive_1078_bp + ,test_hive_1078_cont as test_hive_1078_cont + ,test_hive_1078_lag as test_hive_1078_lag + ,test_hive_199 as test_hive_199 + ,test_hive_1082 as test_hive_1082 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1088 t1 +; + +drop view if exists test_hive_1084; + +create view test_hive_1084 +as +select t1.* +from test_hive_1087 t1 +inner join test_hive_1085 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1100 purge; + +create table test_hive_1100 +( + test_hive_1097 string + ,test_hive_1095 string + ,test_hive_1098 string + ,test_hive_1089 string + ,test_hive_1090 string + ,test_hive_10902 string + ,test_hive_1092 string + ,test_hive_1093 string + ,test_hive_244 string + ,test_hive_225 string + ,test_hive_214 string + ,test_hive_215 string + ,test_hive_216 string + ,test_hive_217 string + ,test_hive_240 string + ,test_hive_241 string + ,test_hive_242 string + ,test_hive_243 string + ,test_hive_245 string + ,test_hive_246 string + ,test_hive_247 string + ,test_hive_248 string + ,test_hive_249 string + ,test_hive_250 string + ,test_hive_218 string + ,test_hive_219 string + ,test_hive_220 string + ,test_hive_221 string + ,test_hive_222 string + ,test_hive_223 string + ,test_hive_224 string + ,test_hive_226 string + ,test_hive_227 string + ,test_hive_228 string + ,test_hive_229 string + ,test_hive_230 string + ,test_hive_231 string + ,test_hive_232 string + ,test_hive_233 string + ,test_hive_234 string + ,test_hive_235 string + ,test_hive_236 string + ,test_hive_237 string + ,test_hive_238 string + ,test_hive_239 string + ,test_hive_1094 string + ,test_hive_1096 string + ,test_hive_1099 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1103 +( + test_hive_1097 string + ,test_hive_1095 string + ,test_hive_1098 string + ,test_hive_1089 string + ,test_hive_1090 string + ,test_hive_10902 string + ,test_hive_1092 string + ,test_hive_1093 string + ,test_hive_244 string + ,test_hive_225 string + ,test_hive_214 string + ,test_hive_215 string + ,test_hive_216 string + ,test_hive_217 string + ,test_hive_240 string + ,test_hive_241 string + ,test_hive_242 string + ,test_hive_243 string + ,test_hive_245 string + ,test_hive_246 string + ,test_hive_247 string + ,test_hive_248 string + 
,test_hive_249 string + ,test_hive_250 string + ,test_hive_218 string + ,test_hive_219 string + ,test_hive_220 string + ,test_hive_221 string + ,test_hive_222 string + ,test_hive_223 string + ,test_hive_224 string + ,test_hive_226 string + ,test_hive_227 string + ,test_hive_228 string + ,test_hive_229 string + ,test_hive_230 string + ,test_hive_231 string + ,test_hive_232 string + ,test_hive_233 string + ,test_hive_234 string + ,test_hive_235 string + ,test_hive_236 string + ,test_hive_237 string + ,test_hive_238 string + ,test_hive_239 string + ,test_hive_1094 string + ,test_hive_1096 string + ,test_hive_1099 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1102 purge; + +create table if not exists test_hive_1102 +( +max_partition bigint +); + +drop view if exists test_hive_1105; + +create view if not exists test_hive_1105 +as +select + cast(test_hive_1097 as int) as test_hive_1097 + ,cast(test_hive_1095 as int) as test_hive_1095 + ,cast(test_hive_1098 as int) as test_hive_1098 + ,cast(from_unixtime(unix_timestamp(test_hive_1089,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1089 + ,cast(test_hive_1090 as string) as test_hive_1090 + ,cast(test_hive_10902 as string) as test_hive_10902 + ,cast(test_hive_1092 as string) as test_hive_1092 + ,cast(test_hive_1093 as string) as test_hive_1093 + ,cast(test_hive_244 as string) as test_hive_244 + ,cast(test_hive_225 as string) as test_hive_225 + ,cast(test_hive_214 as string) as test_hive_214 + ,cast(from_unixtime(unix_timestamp(test_hive_215,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_215 + ,cast(test_hive_216 as string) as test_hive_216 + ,cast(test_hive_217 as string) as test_hive_217 + ,cast(test_hive_240 as string) as test_hive_240 + ,cast(test_hive_241 as string) as test_hive_241 + ,cast(test_hive_242 as string) as test_hive_242 + ,cast(test_hive_243 as string) as test_hive_243 + ,cast(test_hive_245 as string) as test_hive_245 + ,cast(test_hive_246 as string) as test_hive_246 + ,cast(test_hive_247 as string) as test_hive_247 + ,cast(test_hive_248 as string) as test_hive_248 + ,cast(test_hive_249 as string) as test_hive_249 + ,cast(test_hive_250 as string) as test_hive_250 + ,cast(test_hive_218 as string) as test_hive_218 + ,cast(test_hive_219 as string) as test_hive_219 + ,cast(test_hive_220 as string) as test_hive_220 + ,cast(test_hive_221 as string) as test_hive_221 + ,cast(test_hive_222 as string) as test_hive_222 + ,cast(test_hive_223 as string) as test_hive_223 + ,cast(test_hive_224 as string) as test_hive_224 + ,cast(test_hive_226 as string) as test_hive_226 + ,cast(test_hive_227 as string) as test_hive_227 + ,cast(test_hive_228 as string) as test_hive_228 + ,cast(test_hive_229 as string) as test_hive_229 + ,cast(test_hive_230 as string) as test_hive_230 + ,cast(test_hive_231 as string) as test_hive_231 + ,cast(test_hive_232 as string) as test_hive_232 + ,cast(test_hive_233 as string) as test_hive_233 + ,cast(test_hive_234 as string) as test_hive_234 + ,cast(test_hive_235 as string) as test_hive_235 + ,cast(test_hive_236 as string) as test_hive_236 + ,cast(test_hive_237 as string) as test_hive_237 + ,cast(test_hive_238 as string) as test_hive_238 + ,cast(test_hive_239 as string) as test_hive_239 + ,cast(test_hive_1094 as int) as test_hive_1094 + ,cast(test_hive_1096 as int) as test_hive_1096 + ,cast(from_unixtime(unix_timestamp(test_hive_1099,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as 
test_hive_1099 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1103 +; + +drop view if exists test_hive_1104; + +create view test_hive_1104 +as +select + test_hive_1097 as test_hive_1097 + ,test_hive_1095 as test_hive_1095 + ,test_hive_1098 as test_hive_1098 + ,test_hive_1089 as test_hive_1089 + ,test_hive_1090 as test_hive_1090 + ,test_hive_10902 as test_hive_10902 + ,test_hive_1092 as test_hive_1092 + ,test_hive_1093 as test_hive_1093 + ,test_hive_244 as test_hive_244 + ,test_hive_225 as test_hive_225 + ,test_hive_214 as test_hive_214 + ,test_hive_215 as test_hive_215 + ,test_hive_216 as test_hive_216 + ,test_hive_217 as test_hive_217 + ,test_hive_240 as test_hive_240 + ,test_hive_241 as test_hive_241 + ,test_hive_242 as test_hive_242 + ,test_hive_243 as test_hive_243 + ,test_hive_245 as test_hive_245 + ,test_hive_246 as test_hive_246 + ,test_hive_247 as test_hive_247 + ,test_hive_248 as test_hive_248 + ,test_hive_249 as test_hive_249 + ,test_hive_250 as test_hive_250 + ,test_hive_218 as test_hive_218 + ,test_hive_219 as test_hive_219 + ,test_hive_220 as test_hive_220 + ,test_hive_221 as test_hive_221 + ,test_hive_222 as test_hive_222 + ,test_hive_223 as test_hive_223 + ,test_hive_224 as test_hive_224 + ,test_hive_226 as test_hive_226 + ,test_hive_227 as test_hive_227 + ,test_hive_228 as test_hive_228 + ,test_hive_229 as test_hive_229 + ,test_hive_230 as test_hive_230 + ,test_hive_231 as test_hive_231 + ,test_hive_232 as test_hive_232 + ,test_hive_233 as test_hive_233 + ,test_hive_234 as test_hive_234 + ,test_hive_235 as test_hive_235 + ,test_hive_236 as test_hive_236 + ,test_hive_237 as test_hive_237 + ,test_hive_238 as test_hive_238 + ,test_hive_239 as test_hive_239 + ,test_hive_1094 as test_hive_1094 + ,test_hive_1096 as test_hive_1096 + ,test_hive_1099 as test_hive_1099 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1105 t1 +; + +drop view if exists test_hive_1101; + +create view test_hive_1101 +as +select t1.* +from test_hive_1104 t1 +inner join test_hive_1102 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1250 purge; + +create table test_hive_1250 +( + test_hive_1240 string + ,test_hive_1239 string + ,test_hive_1241 string + ,test_hive_300 string + ,test_hive_288 string + ,test_hive_294 string + ,test_hive_299 string + ,test_hive_297 string + ,test_hive_285 string + ,test_hive_292 string + ,test_hive_290 string + ,test_hive_291 string + ,test_hive_303 string + ,test_hive_304 string + ,test_hive_305 string + ,test_hive_286 string + ,test_hive_293 string + ,test_hive_287 string + ,test_hive_296 string + ,test_hive_295 string + ,test_hive_298 string + ,test_hive_282 string + ,test_hive_283 string + ,test_hive_284 string + ,test_hive_289 string + ,test_hive_302 string + ,test_hive_301 string + ,test_hive_281 string + ,test_hive_1233 string + ,test_hive_1234 string + ,test_hive_12342 string + ,test_hive_1236 string + ,test_hive_1237 string + ,test_hive_1238 string + ,test_hive_1243 string + ,test_hive_1243_lag string + ,test_hive_1242 string + ,test_hive_1232 string + ,test_hive_1243_bp string + ,test_hive_1243_lag_bp string + ,test_hive_1243_con string + ,test_hive_1243_lag_con string + ,test_hive_1249 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1253 +( + test_hive_1240 string + ,test_hive_1239 
string + ,test_hive_1241 string + ,test_hive_300 string + ,test_hive_288 string + ,test_hive_294 string + ,test_hive_299 string + ,test_hive_297 string + ,test_hive_285 string + ,test_hive_292 string + ,test_hive_290 string + ,test_hive_291 string + ,test_hive_303 string + ,test_hive_304 string + ,test_hive_305 string + ,test_hive_286 string + ,test_hive_293 string + ,test_hive_287 string + ,test_hive_296 string + ,test_hive_295 string + ,test_hive_298 string + ,test_hive_282 string + ,test_hive_283 string + ,test_hive_284 string + ,test_hive_289 string + ,test_hive_302 string + ,test_hive_301 string + ,test_hive_281 string + ,test_hive_1233 string + ,test_hive_1234 string + ,test_hive_12342 string + ,test_hive_1236 string + ,test_hive_1237 string + ,test_hive_1238 string + ,test_hive_1243 string + ,test_hive_1243_lag string + ,test_hive_1242 string + ,test_hive_1232 string + ,test_hive_1243_bp string + ,test_hive_1243_lag_bp string + ,test_hive_1243_con string + ,test_hive_1243_lag_con string + ,test_hive_1249 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1252 purge; + +create table if not exists test_hive_1252 +( +max_partition bigint +); + +drop view if exists test_hive_1255; + +create view if not exists test_hive_1255 +as +select + cast(test_hive_1240 as int) as test_hive_1240 + ,cast(test_hive_1239 as int) as test_hive_1239 + ,cast(test_hive_1241 as int) as test_hive_1241 + ,cast(test_hive_300 as string) as test_hive_300 + ,cast(test_hive_288 as string) as test_hive_288 + ,cast(test_hive_294 as string) as test_hive_294 + ,cast(test_hive_299 as string) as test_hive_299 + ,cast(test_hive_297 as string) as test_hive_297 + ,cast(test_hive_285 as string) as test_hive_285 + ,cast(test_hive_292 as string) as test_hive_292 + ,cast(test_hive_290 as string) as test_hive_290 + ,cast(test_hive_291 as string) as test_hive_291 + ,cast(test_hive_303 as string) as test_hive_303 + ,cast(test_hive_304 as string) as test_hive_304 + ,cast(test_hive_305 as string) as test_hive_305 + ,cast(test_hive_286 as string) as test_hive_286 + ,cast(test_hive_293 as string) as test_hive_293 + ,cast(test_hive_287 as string) as test_hive_287 + ,cast(test_hive_296 as string) as test_hive_296 + ,cast(test_hive_295 as string) as test_hive_295 + ,cast(test_hive_298 as string) as test_hive_298 + ,cast(test_hive_282 as string) as test_hive_282 + ,cast(test_hive_283 as string) as test_hive_283 + ,cast(test_hive_284 as string) as test_hive_284 + ,cast(test_hive_289 as string) as test_hive_289 + ,cast(test_hive_302 as string) as test_hive_302 + ,cast(test_hive_301 as string) as test_hive_301 + ,cast(test_hive_281 as string) as test_hive_281 + ,cast(from_unixtime(unix_timestamp(test_hive_1233,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1233 + ,cast(test_hive_1234 as string) as test_hive_1234 + ,cast(test_hive_12342 as string) as test_hive_12342 + ,cast(test_hive_1236 as string) as test_hive_1236 + ,cast(test_hive_1237 as string) as test_hive_1237 + ,cast(test_hive_1238 as string) as test_hive_1238 + ,cast(test_hive_1243 as double) as test_hive_1243 + ,cast(test_hive_1243_lag as double) as test_hive_1243_lag + ,cast(test_hive_1242 as double) as test_hive_1242 + ,cast(test_hive_1232 as double) as test_hive_1232 + ,cast(test_hive_1243_bp as double) as test_hive_1243_bp + ,cast(test_hive_1243_lag_bp as double) as test_hive_1243_lag_bp + ,cast(test_hive_1243_con as double) as test_hive_1243_con + 
,cast(test_hive_1243_lag_con as double) as test_hive_1243_lag_con + ,cast(from_unixtime(unix_timestamp(test_hive_1249,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1249 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1253 +; + +drop view if exists test_hive_1254; + +create view test_hive_1254 +as +select + test_hive_1240 as test_hive_1240 + ,test_hive_1239 as test_hive_1239 + ,test_hive_1241 as test_hive_1241 + ,test_hive_300 as test_hive_300 + ,test_hive_288 as test_hive_288 + ,test_hive_294 as test_hive_294 + ,test_hive_299 as test_hive_299 + ,test_hive_297 as test_hive_297 + ,test_hive_285 as test_hive_285 + ,test_hive_292 as test_hive_292 + ,test_hive_290 as test_hive_290 + ,test_hive_291 as test_hive_291 + ,test_hive_303 as test_hive_303 + ,test_hive_304 as test_hive_304 + ,test_hive_305 as test_hive_305 + ,test_hive_286 as test_hive_286 + ,test_hive_293 as test_hive_293 + ,test_hive_287 as test_hive_287 + ,test_hive_296 as test_hive_296 + ,test_hive_295 as test_hive_295 + ,test_hive_298 as test_hive_298 + ,test_hive_282 as test_hive_282 + ,test_hive_283 as test_hive_283 + ,test_hive_284 as test_hive_284 + ,test_hive_289 as test_hive_289 + ,test_hive_302 as test_hive_302 + ,test_hive_301 as test_hive_301 + ,test_hive_281 as test_hive_281 + ,test_hive_1233 as test_hive_1233 + ,test_hive_1234 as test_hive_1234 + ,test_hive_12342 as test_hive_12342 + ,test_hive_1236 as test_hive_1236 + ,test_hive_1237 as test_hive_1237 + ,test_hive_1238 as test_hive_1238 + ,test_hive_1243 as test_hive_1243 + ,test_hive_1243_lag as test_hive_1243_lag + ,test_hive_1242 as test_hive_1242 + ,test_hive_1232 as test_hive_1232 + ,test_hive_1243_bp as test_hive_1243_bp + ,test_hive_1243_lag_bp as test_hive_1243_lag_bp + ,test_hive_1243_con as test_hive_1243_con + ,test_hive_1243_lag_con as test_hive_1243_lag_con + ,test_hive_1249 as test_hive_1249 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1255 t1 +; + +drop view if exists test_hive_1251; + +create view test_hive_1251 +as +select t1.* +from test_hive_1254 t1 +inner join test_hive_1252 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1373 purge; + +create table test_hive_1373 +( + test_hive_1370 string + ,test_hive_1367 string + ,test_hive_1371 string + ,test_hive_1366 string + ,test_hive_338 string + ,test_hive_338_txt string + ,test_hive_340 string + ,test_hive_345 string + ,test_hive_345_txt string + ,test_hive_347 string + ,test_hive_348 string + ,test_hive_370 string + ,test_hive_373 string + ,test_hive_357 string + ,test_hive_375 string + ,test_hive_359 string + ,test_hive_341 string + ,test_hive_1368 string + ,test_hive_1369 string + ,test_hive_367 string + ,test_hive_354 string + ,test_hive_360 string + ,test_hive_349 string + ,test_hive_368 string + ,test_hive_369 string + ,test_hive_355 string + ,test_hive_342 string + ,test_hive_372 string + ,test_hive_363 string + ,test_hive_351 string + ,test_hive_365 string + ,test_hive_352 string + ,test_hive_366 string + ,test_hive_353 string + ,test_hive_364 string + ,test_hive_1381 string + ,test_hive_358 string + ,test_hive_1379 string + ,test_hive_362 string + ,test_hive_1380 string + ,test_hive_361 string + ,test_hive_350 string + ,test_hive_374 string + ,test_hive_343 string + ,test_hive_343_txt string + ,test_hive_371 string + ,test_hive_356 string + ,test_hive_1372 string +) +partitioned by (ds int, ts int) +row format delimited 
fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1376 +( + test_hive_1370 string + ,test_hive_1367 string + ,test_hive_1371 string + ,test_hive_1366 string + ,test_hive_338 string + ,test_hive_338_txt string + ,test_hive_340 string + ,test_hive_345 string + ,test_hive_345_txt string + ,test_hive_347 string + ,test_hive_348 string + ,test_hive_370 string + ,test_hive_373 string + ,test_hive_357 string + ,test_hive_375 string + ,test_hive_359 string + ,test_hive_341 string + ,test_hive_1368 string + ,test_hive_1369 string + ,test_hive_367 string + ,test_hive_354 string + ,test_hive_360 string + ,test_hive_349 string + ,test_hive_368 string + ,test_hive_369 string + ,test_hive_355 string + ,test_hive_342 string + ,test_hive_372 string + ,test_hive_363 string + ,test_hive_351 string + ,test_hive_365 string + ,test_hive_352 string + ,test_hive_366 string + ,test_hive_353 string + ,test_hive_364 string + ,test_hive_1381 string + ,test_hive_358 string + ,test_hive_1379 string + ,test_hive_362 string + ,test_hive_1380 string + ,test_hive_361 string + ,test_hive_350 string + ,test_hive_374 string + ,test_hive_343 string + ,test_hive_343_txt string + ,test_hive_371 string + ,test_hive_356 string + ,test_hive_1372 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1375 purge; + +create table if not exists test_hive_1375 +( +max_partition bigint +); + +drop view if exists test_hive_1378; + +create view if not exists test_hive_1378 +as +select + cast(test_hive_1370 as int) as test_hive_1370 + ,cast(test_hive_1367 as int) as test_hive_1367 + ,cast(test_hive_1371 as int) as test_hive_1371 + ,cast(test_hive_1366 as string) as test_hive_1366 + ,cast(test_hive_338 as string) as test_hive_338 + ,cast(test_hive_338_txt as string) as test_hive_338_txt + ,cast(test_hive_340 as string) as test_hive_340 + ,cast(test_hive_345 as string) as test_hive_345 + ,cast(test_hive_345_txt as string) as test_hive_345_txt + ,cast(test_hive_347 as string) as test_hive_347 + ,cast(test_hive_348 as string) as test_hive_348 + ,cast(test_hive_370 as string) as test_hive_370 + ,cast(test_hive_373 as string) as test_hive_373 + ,cast(test_hive_357 as string) as test_hive_357 + ,cast(test_hive_375 as string) as test_hive_375 + ,cast(test_hive_359 as string) as test_hive_359 + ,cast(test_hive_341 as string) as test_hive_341 + ,cast(test_hive_1368 as int) as test_hive_1368 + ,cast(test_hive_1369 as int) as test_hive_1369 + ,cast(test_hive_367 as string) as test_hive_367 + ,cast(test_hive_354 as string) as test_hive_354 + ,cast(test_hive_360 as string) as test_hive_360 + ,cast(test_hive_349 as string) as test_hive_349 + ,cast(test_hive_368 as string) as test_hive_368 + ,cast(test_hive_369 as string) as test_hive_369 + ,cast(test_hive_355 as string) as test_hive_355 + ,cast(from_unixtime(unix_timestamp(test_hive_342,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_342 + ,cast(test_hive_372 as string) as test_hive_372 + ,cast(test_hive_363 as string) as test_hive_363 + ,cast(test_hive_351 as string) as test_hive_351 + ,cast(test_hive_365 as string) as test_hive_365 + ,cast(test_hive_352 as string) as test_hive_352 + ,cast(test_hive_366 as string) as test_hive_366 + ,cast(test_hive_353 as string) as test_hive_353 + ,cast(test_hive_364 as string) as test_hive_364 + ,cast(test_hive_1381 as string) as test_hive_1381 + ,cast(test_hive_358 as string) as 
test_hive_358 + ,cast(test_hive_1379 as string) as test_hive_1379 + ,cast(test_hive_362 as string) as test_hive_362 + ,cast(test_hive_1380 as string) as test_hive_1380 + ,cast(test_hive_361 as string) as test_hive_361 + ,cast(test_hive_350 as string) as test_hive_350 + ,cast(test_hive_374 as string) as test_hive_374 + ,cast(test_hive_343 as string) as test_hive_343 + ,cast(test_hive_343_txt as string) as test_hive_343_txt + ,cast(test_hive_371 as string) as test_hive_371 + ,cast(test_hive_356 as string) as test_hive_356 + ,cast(from_unixtime(unix_timestamp(test_hive_1372,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1372 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1376 +; + +drop view if exists test_hive_1377; + +create view test_hive_1377 +as +select + test_hive_1370 as test_hive_1370 + ,test_hive_1367 as test_hive_1367 + ,test_hive_1371 as test_hive_1371 + ,test_hive_1366 as test_hive_1366 + ,test_hive_338 as test_hive_338 + ,test_hive_338_txt as test_hive_338_txt + ,test_hive_340 as test_hive_340 + ,test_hive_345 as test_hive_345 + ,test_hive_345_txt as test_hive_345_txt + ,test_hive_347 as test_hive_347 + ,test_hive_348 as test_hive_348 + ,test_hive_370 as test_hive_370 + ,test_hive_373 as test_hive_373 + ,test_hive_357 as test_hive_357 + ,test_hive_375 as test_hive_375 + ,test_hive_359 as test_hive_359 + ,test_hive_341 as test_hive_341 + ,test_hive_1368 as test_hive_1368 + ,test_hive_1369 as test_hive_1369 + ,test_hive_367 as test_hive_367 + ,test_hive_354 as test_hive_354 + ,test_hive_360 as test_hive_360 + ,test_hive_349 as test_hive_349 + ,test_hive_368 as test_hive_368 + ,test_hive_369 as test_hive_369 + ,test_hive_355 as test_hive_355 + ,test_hive_342 as test_hive_342 + ,test_hive_372 as test_hive_372 + ,test_hive_363 as test_hive_363 + ,test_hive_351 as test_hive_351 + ,test_hive_365 as test_hive_365 + ,test_hive_352 as test_hive_352 + ,test_hive_366 as test_hive_366 + ,test_hive_353 as test_hive_353 + ,test_hive_364 as test_hive_364 + ,test_hive_1381 as test_hive_1381 + ,test_hive_358 as test_hive_358 + ,test_hive_1379 as test_hive_1379 + ,test_hive_362 as test_hive_362 + ,test_hive_1380 as test_hive_1380 + ,test_hive_361 as test_hive_361 + ,test_hive_350 as test_hive_350 + ,test_hive_374 as test_hive_374 + ,test_hive_343 as test_hive_343 + ,test_hive_343_txt as test_hive_343_txt + ,test_hive_371 as test_hive_371 + ,test_hive_356 as test_hive_356 + ,test_hive_1372 as test_hive_1372 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1378 t1 +; + +drop view if exists test_hive_1374; + +create view test_hive_1374 +as +select t1.* +from test_hive_1377 t1 +inner join test_hive_1375 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1397 purge; + +create table test_hive_1397 +( + test_hive_1394 string + ,test_hive_1383 string + ,test_hive_1395 string + ,test_hive_1382 string + ,test_hive_407 string + ,test_hive_397 string + ,test_hive_379 string + ,test_hive_408 string + ,test_hive_1384 string + ,test_hive_1385 string + ,test_hive_1386 string + ,test_hive_1387 string + ,test_hive_1388 string + ,test_hive_1389 string + ,test_hive_1390 string + ,test_hive_1391 string + ,test_hive_1392 string + ,test_hive_1393 string + ,test_hive_400 string + ,test_hive_386 string + ,test_hive_409 string + ,test_hive_390 string + ,test_hive_381 string + ,test_hive_380 string + ,test_hive_382 string + ,test_hive_382_txt string + 
,test_hive_410 string + ,test_hive_391 string + ,test_hive_403 string + ,test_hive_388 string + ,test_hive_405 string + ,test_hive_389 string + ,test_hive_393 string + ,test_hive_376 string + ,test_hive_394 string + ,test_hive_377 string + ,test_hive_395 string + ,test_hive_378 string + ,test_hive_406 string + ,test_hive_1406 string + ,test_hive_404 string + ,test_hive_1405 string + ,test_hive_396 string + ,test_hive_1403 string + ,test_hive_402 string + ,test_hive_1404 string + ,test_hive_398 string + ,test_hive_384 string + ,test_hive_399 string + ,test_hive_385 string + ,test_hive_411 string + ,test_hive_392 string + ,test_hive_401 string + ,test_hive_387 string + ,test_hive_1396 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1400 +( + test_hive_1394 string + ,test_hive_1383 string + ,test_hive_1395 string + ,test_hive_1382 string + ,test_hive_407 string + ,test_hive_397 string + ,test_hive_379 string + ,test_hive_408 string + ,test_hive_1384 string + ,test_hive_1385 string + ,test_hive_1386 string + ,test_hive_1387 string + ,test_hive_1388 string + ,test_hive_1389 string + ,test_hive_1390 string + ,test_hive_1391 string + ,test_hive_1392 string + ,test_hive_1393 string + ,test_hive_400 string + ,test_hive_386 string + ,test_hive_409 string + ,test_hive_390 string + ,test_hive_381 string + ,test_hive_380 string + ,test_hive_382 string + ,test_hive_382_txt string + ,test_hive_410 string + ,test_hive_391 string + ,test_hive_403 string + ,test_hive_388 string + ,test_hive_405 string + ,test_hive_389 string + ,test_hive_393 string + ,test_hive_376 string + ,test_hive_394 string + ,test_hive_377 string + ,test_hive_395 string + ,test_hive_378 string + ,test_hive_406 string + ,test_hive_1406 string + ,test_hive_404 string + ,test_hive_1405 string + ,test_hive_396 string + ,test_hive_1403 string + ,test_hive_402 string + ,test_hive_1404 string + ,test_hive_398 string + ,test_hive_384 string + ,test_hive_399 string + ,test_hive_385 string + ,test_hive_411 string + ,test_hive_392 string + ,test_hive_401 string + ,test_hive_387 string + ,test_hive_1396 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1399 purge; + +create table if not exists test_hive_1399 +( +max_partition bigint +); + +drop view if exists test_hive_1402; + +create view if not exists test_hive_1402 +as +select + cast(test_hive_1394 as int) as test_hive_1394 + ,cast(test_hive_1383 as int) as test_hive_1383 + ,cast(test_hive_1395 as int) as test_hive_1395 + ,cast(test_hive_1382 as string) as test_hive_1382 + ,cast(test_hive_407 as string) as test_hive_407 + ,cast(test_hive_397 as string) as test_hive_397 + ,cast(test_hive_379 as string) as test_hive_379 + ,cast(test_hive_408 as string) as test_hive_408 + ,cast(test_hive_1384 as int) as test_hive_1384 + ,cast(test_hive_1385 as int) as test_hive_1385 + ,cast(test_hive_1386 as int) as test_hive_1386 + ,cast(test_hive_1387 as int) as test_hive_1387 + ,cast(test_hive_1388 as int) as test_hive_1388 + ,cast(test_hive_1389 as double) as test_hive_1389 + ,cast(test_hive_1390 as double) as test_hive_1390 + ,cast(test_hive_1391 as int) as test_hive_1391 + ,cast(test_hive_1392 as int) as test_hive_1392 + ,cast(test_hive_1393 as int) as test_hive_1393 + ,cast(test_hive_400 as string) as test_hive_400 + ,cast(test_hive_386 as string) as 
test_hive_386 + ,cast(test_hive_409 as string) as test_hive_409 + ,cast(test_hive_390 as string) as test_hive_390 + ,cast(from_unixtime(unix_timestamp(test_hive_381,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_381 + ,cast(test_hive_380 as string) as test_hive_380 + ,cast(test_hive_382 as string) as test_hive_382 + ,cast(test_hive_382_txt as string) as test_hive_382_txt + ,cast(test_hive_410 as string) as test_hive_410 + ,cast(test_hive_391 as string) as test_hive_391 + ,cast(test_hive_403 as string) as test_hive_403 + ,cast(test_hive_388 as string) as test_hive_388 + ,cast(test_hive_405 as string) as test_hive_405 + ,cast(test_hive_389 as string) as test_hive_389 + ,cast(test_hive_393 as string) as test_hive_393 + ,cast(test_hive_376 as string) as test_hive_376 + ,cast(test_hive_394 as string) as test_hive_394 + ,cast(test_hive_377 as string) as test_hive_377 + ,cast(test_hive_395 as string) as test_hive_395 + ,cast(test_hive_378 as string) as test_hive_378 + ,cast(test_hive_406 as string) as test_hive_406 + ,cast(test_hive_1406 as string) as test_hive_1406 + ,cast(test_hive_404 as string) as test_hive_404 + ,cast(test_hive_1405 as string) as test_hive_1405 + ,cast(test_hive_396 as string) as test_hive_396 + ,cast(test_hive_1403 as string) as test_hive_1403 + ,cast(test_hive_402 as string) as test_hive_402 + ,cast(test_hive_1404 as string) as test_hive_1404 + ,cast(test_hive_398 as string) as test_hive_398 + ,cast(test_hive_384 as string) as test_hive_384 + ,cast(test_hive_399 as string) as test_hive_399 + ,cast(test_hive_385 as string) as test_hive_385 + ,cast(test_hive_411 as string) as test_hive_411 + ,cast(test_hive_392 as string) as test_hive_392 + ,cast(test_hive_401 as string) as test_hive_401 + ,cast(test_hive_387 as string) as test_hive_387 + ,cast(from_unixtime(unix_timestamp(test_hive_1396,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1396 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1400 +; + +drop view if exists test_hive_1401; + +create view test_hive_1401 +as +select + test_hive_1394 as test_hive_1394 + ,test_hive_1383 as test_hive_1383 + ,test_hive_1395 as test_hive_1395 + ,test_hive_1382 as test_hive_1382 + ,test_hive_407 as test_hive_407 + ,test_hive_397 as test_hive_397 + ,test_hive_379 as test_hive_379 + ,test_hive_408 as test_hive_408 + ,test_hive_1384 as test_hive_1384 + ,test_hive_1385 as test_hive_1385 + ,test_hive_1386 as test_hive_1386 + ,test_hive_1387 as test_hive_1387 + ,test_hive_1388 as test_hive_1388 + ,test_hive_1389 as test_hive_1389 + ,test_hive_1390 as test_hive_1390 + ,test_hive_1391 as test_hive_1391 + ,test_hive_1392 as test_hive_1392 + ,test_hive_1393 as test_hive_1393 + ,test_hive_400 as test_hive_400 + ,test_hive_386 as test_hive_386 + ,test_hive_409 as test_hive_409 + ,test_hive_390 as test_hive_390 + ,test_hive_381 as test_hive_381 + ,test_hive_380 as test_hive_380 + ,test_hive_382 as test_hive_382 + ,test_hive_382_txt as test_hive_382_txt + ,test_hive_410 as test_hive_410 + ,test_hive_391 as test_hive_391 + ,test_hive_403 as test_hive_403 + ,test_hive_388 as test_hive_388 + ,test_hive_405 as test_hive_405 + ,test_hive_389 as test_hive_389 + ,test_hive_393 as test_hive_393 + ,test_hive_376 as test_hive_376 + ,test_hive_394 as test_hive_394 + ,test_hive_377 as test_hive_377 + ,test_hive_395 as test_hive_395 + ,test_hive_378 as test_hive_378 + ,test_hive_406 as test_hive_406 + ,test_hive_1406 as test_hive_1406 
+ ,test_hive_404 as test_hive_404 + ,test_hive_1405 as test_hive_1405 + ,test_hive_396 as test_hive_396 + ,test_hive_1403 as test_hive_1403 + ,test_hive_402 as test_hive_402 + ,test_hive_1404 as test_hive_1404 + ,test_hive_398 as test_hive_398 + ,test_hive_384 as test_hive_384 + ,test_hive_399 as test_hive_399 + ,test_hive_385 as test_hive_385 + ,test_hive_411 as test_hive_411 + ,test_hive_392 as test_hive_392 + ,test_hive_401 as test_hive_401 + ,test_hive_387 as test_hive_387 + ,test_hive_1396 as test_hive_1396 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1402 t1 +; + +drop view if exists test_hive_1398; + +create view test_hive_1398 +as +select t1.* +from test_hive_1401 t1 +inner join test_hive_1399 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1417 purge; + +create table test_hive_1417 +( + test_hive_1411 string + ,test_hive_1407 string + ,test_hive_1412 string + ,test_hive_412 string + ,test_hive_1410 string + ,test_hive_1409 string + ,test_hive_1408 string + ,test_hive_1415 string + ,test_hive_1414 string + ,test_hive_1413 string + ,test_hive_1416 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1420 +( + test_hive_1411 string + ,test_hive_1407 string + ,test_hive_1412 string + ,test_hive_412 string + ,test_hive_1410 string + ,test_hive_1409 string + ,test_hive_1408 string + ,test_hive_1415 string + ,test_hive_1414 string + ,test_hive_1413 string + ,test_hive_1416 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1419 purge; + +create table if not exists test_hive_1419 +( +max_partition bigint +); + +drop view if exists test_hive_1422; + +create view if not exists test_hive_1422 +as +select + cast(test_hive_1411 as int) as test_hive_1411 + ,cast(test_hive_1407 as int) as test_hive_1407 + ,cast(test_hive_1412 as int) as test_hive_1412 + ,cast(test_hive_412 as string) as test_hive_412 + ,cast(test_hive_1410 as string) as test_hive_1410 + ,cast(from_unixtime(unix_timestamp(test_hive_1409,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1409 + ,cast(from_unixtime(unix_timestamp(test_hive_1408,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1408 + ,cast(test_hive_1415 as string) as test_hive_1415 + ,cast(test_hive_1414 as string) as test_hive_1414 + ,cast(test_hive_1413 as string) as test_hive_1413 + ,cast(from_unixtime(unix_timestamp(test_hive_1416,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1416 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1420 +; + +drop view if exists test_hive_1421; + +create view test_hive_1421 +as +select + test_hive_1411 as test_hive_1411 + ,test_hive_1407 as test_hive_1407 + ,test_hive_1412 as test_hive_1412 + ,test_hive_412 as test_hive_412 + ,test_hive_1410 as test_hive_1410 + ,test_hive_1409 as test_hive_1409 + ,test_hive_1408 as test_hive_1408 + ,test_hive_1415 as test_hive_1415 + ,test_hive_1414 as test_hive_1414 + ,test_hive_1413 as test_hive_1413 + ,test_hive_1416 as test_hive_1416 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1422 t1 +; + +drop view if exists test_hive_1418; + +create view test_hive_1418 +as +select t1.* +from test_hive_1421 t1 +inner join test_hive_1419 t2 on +t1.ds_ts = 
t2.max_partition; +drop table if exists test_hive_1114 purge; + +create table test_hive_1114 +( + test_hive_1108 string + ,test_hive_1106 string + ,test_hive_1109 string + ,test_hive_272 string + ,test_hive_1107 string + ,test_hive_1112 string + ,test_hive_1111 string + ,test_hive_1110 string + ,test_hive_1113 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1117 +( + test_hive_1108 string + ,test_hive_1106 string + ,test_hive_1109 string + ,test_hive_272 string + ,test_hive_1107 string + ,test_hive_1112 string + ,test_hive_1111 string + ,test_hive_1110 string + ,test_hive_1113 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1116 purge; + +create table if not exists test_hive_1116 +( +max_partition bigint +); + +drop view if exists test_hive_1119; + +create view if not exists test_hive_1119 +as +select + cast(test_hive_1108 as int) as test_hive_1108 + ,cast(test_hive_1106 as int) as test_hive_1106 + ,cast(test_hive_1109 as int) as test_hive_1109 + ,cast(test_hive_272 as string) as test_hive_272 + ,cast(test_hive_1107 as string) as test_hive_1107 + ,cast(test_hive_1112 as string) as test_hive_1112 + ,cast(test_hive_1111 as string) as test_hive_1111 + ,cast(test_hive_1110 as string) as test_hive_1110 + ,cast(from_unixtime(unix_timestamp(test_hive_1113,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1113 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1117 +; + +drop view if exists test_hive_1118; + +create view test_hive_1118 +as +select + test_hive_1108 as test_hive_1108 + ,test_hive_1106 as test_hive_1106 + ,test_hive_1109 as test_hive_1109 + ,test_hive_272 as test_hive_272 + ,test_hive_1107 as test_hive_1107 + ,test_hive_1112 as test_hive_1112 + ,test_hive_1111 as test_hive_1111 + ,test_hive_1110 as test_hive_1110 + ,test_hive_1113 as test_hive_1113 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1119 t1 +; + +drop view if exists test_hive_1115; + +create view test_hive_1115 +as +select t1.* +from test_hive_1118 t1 +inner join test_hive_1116 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1128 purge; + +create table test_hive_1128 +( + test_hive_1122 string + ,test_hive_1120 string + ,test_hive_1123 string + ,test_hive_273 string + ,test_hive_1121 string + ,test_hive_1126 string + ,test_hive_1125 string + ,test_hive_1124 string + ,test_hive_1127 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1131 +( + test_hive_1122 string + ,test_hive_1120 string + ,test_hive_1123 string + ,test_hive_273 string + ,test_hive_1121 string + ,test_hive_1126 string + ,test_hive_1125 string + ,test_hive_1124 string + ,test_hive_1127 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1130 purge; + +create table if not exists test_hive_1130 +( +max_partition bigint +); + +drop view if exists test_hive_1133; + +create view if not exists test_hive_1133 +as +select + cast(test_hive_1122 as int) as test_hive_1122 + ,cast(test_hive_1120 as int) as 
test_hive_1120 + ,cast(test_hive_1123 as int) as test_hive_1123 + ,cast(test_hive_273 as string) as test_hive_273 + ,cast(test_hive_1121 as string) as test_hive_1121 + ,cast(test_hive_1126 as string) as test_hive_1126 + ,cast(test_hive_1125 as string) as test_hive_1125 + ,cast(test_hive_1124 as string) as test_hive_1124 + ,cast(from_unixtime(unix_timestamp(test_hive_1127,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1127 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1131 +; + +drop view if exists test_hive_1132; + +create view test_hive_1132 +as +select + test_hive_1122 as test_hive_1122 + ,test_hive_1120 as test_hive_1120 + ,test_hive_1123 as test_hive_1123 + ,test_hive_273 as test_hive_273 + ,test_hive_1121 as test_hive_1121 + ,test_hive_1126 as test_hive_1126 + ,test_hive_1125 as test_hive_1125 + ,test_hive_1124 as test_hive_1124 + ,test_hive_1127 as test_hive_1127 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1133 t1 +; + +drop view if exists test_hive_1129; + +create view test_hive_1129 +as +select t1.* +from test_hive_1132 t1 +inner join test_hive_1130 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1142 purge; + +create table test_hive_1142 +( + test_hive_1136 string + ,test_hive_1134 string + ,test_hive_1137 string + ,test_hive_274 string + ,test_hive_1135 string + ,test_hive_1140 string + ,test_hive_1139 string + ,test_hive_1138 string + ,test_hive_1141 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1145 +( + test_hive_1136 string + ,test_hive_1134 string + ,test_hive_1137 string + ,test_hive_274 string + ,test_hive_1135 string + ,test_hive_1140 string + ,test_hive_1139 string + ,test_hive_1138 string + ,test_hive_1141 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1144 purge; + +create table if not exists test_hive_1144 +( +max_partition bigint +); + +drop view if exists test_hive_1147; + +create view if not exists test_hive_1147 +as +select + cast(test_hive_1136 as int) as test_hive_1136 + ,cast(test_hive_1134 as int) as test_hive_1134 + ,cast(test_hive_1137 as int) as test_hive_1137 + ,cast(test_hive_274 as string) as test_hive_274 + ,cast(test_hive_1135 as string) as test_hive_1135 + ,cast(test_hive_1140 as string) as test_hive_1140 + ,cast(test_hive_1139 as string) as test_hive_1139 + ,cast(test_hive_1138 as string) as test_hive_1138 + ,cast(from_unixtime(unix_timestamp(test_hive_1141,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1141 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1145 +; + +drop view if exists test_hive_1146; + +create view test_hive_1146 +as +select + test_hive_1136 as test_hive_1136 + ,test_hive_1134 as test_hive_1134 + ,test_hive_1137 as test_hive_1137 + ,test_hive_274 as test_hive_274 + ,test_hive_1135 as test_hive_1135 + ,test_hive_1140 as test_hive_1140 + ,test_hive_1139 as test_hive_1139 + ,test_hive_1138 as test_hive_1138 + ,test_hive_1141 as test_hive_1141 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1147 t1 +; + +drop view if exists test_hive_1143; + +create view 
test_hive_1143 +as +select t1.* +from test_hive_1146 t1 +inner join test_hive_1144 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1158 purge; + +create table test_hive_1158 +( + test_hive_1152 string + ,test_hive_1148 string + ,test_hive_1153 string + ,test_hive_275 string + ,test_hive_1151 string + ,test_hive_1150 string + ,test_hive_1149 string + ,test_hive_1156 string + ,test_hive_1155 string + ,test_hive_1154 string + ,test_hive_1157 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1161 +( + test_hive_1152 string + ,test_hive_1148 string + ,test_hive_1153 string + ,test_hive_275 string + ,test_hive_1151 string + ,test_hive_1150 string + ,test_hive_1149 string + ,test_hive_1156 string + ,test_hive_1155 string + ,test_hive_1154 string + ,test_hive_1157 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1160 purge; + +create table if not exists test_hive_1160 +( +max_partition bigint +); + +drop view if exists test_hive_1163; + +create view if not exists test_hive_1163 +as +select + cast(test_hive_1152 as int) as test_hive_1152 + ,cast(test_hive_1148 as int) as test_hive_1148 + ,cast(test_hive_1153 as int) as test_hive_1153 + ,cast(test_hive_275 as decimal) as test_hive_275 + ,cast(test_hive_1151 as string) as test_hive_1151 + ,cast(from_unixtime(unix_timestamp(test_hive_1150,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1150 + ,cast(from_unixtime(unix_timestamp(test_hive_1149,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1149 + ,cast(test_hive_1156 as string) as test_hive_1156 + ,cast(test_hive_1155 as string) as test_hive_1155 + ,cast(test_hive_1154 as string) as test_hive_1154 + ,cast(from_unixtime(unix_timestamp(test_hive_1157,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1157 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1161 +; + +drop view if exists test_hive_1162; + +create view test_hive_1162 +as +select + test_hive_1152 as test_hive_1152 + ,test_hive_1148 as test_hive_1148 + ,test_hive_1153 as test_hive_1153 + ,test_hive_275 as test_hive_275 + ,test_hive_1151 as test_hive_1151 + ,test_hive_1150 as test_hive_1150 + ,test_hive_1149 as test_hive_1149 + ,test_hive_1156 as test_hive_1156 + ,test_hive_1155 as test_hive_1155 + ,test_hive_1154 as test_hive_1154 + ,test_hive_1157 as test_hive_1157 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1163 t1 +; + +drop view if exists test_hive_1159; + +create view test_hive_1159 +as +select t1.* +from test_hive_1162 t1 +inner join test_hive_1160 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1172 purge; + +create table test_hive_1172 +( + test_hive_1166 string + ,test_hive_1164 string + ,test_hive_1167 string + ,test_hive_276 string + ,test_hive_1165 string + ,test_hive_1170 string + ,test_hive_1169 string + ,test_hive_1168 string + ,test_hive_1171 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1175 +( + test_hive_1166 string + ,test_hive_1164 string + ,test_hive_1167 string + ,test_hive_276 string + ,test_hive_1165 string + ,test_hive_1170 string + 
+ ,test_hive_1169 string
+ ,test_hive_1168 string
+ ,test_hive_1171 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_1174 purge;
+
+create table if not exists test_hive_1174
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_1177;
+
+create view if not exists test_hive_1177
+as
+select
+ cast(test_hive_1166 as int) as test_hive_1166
+ ,cast(test_hive_1164 as int) as test_hive_1164
+ ,cast(test_hive_1167 as int) as test_hive_1167
+ ,cast(test_hive_276 as string) as test_hive_276
+ ,cast(test_hive_1165 as string) as test_hive_1165
+ ,cast(test_hive_1170 as string) as test_hive_1170
+ ,cast(test_hive_1169 as string) as test_hive_1169
+ ,cast(test_hive_1168 as string) as test_hive_1168
+ ,cast(from_unixtime(unix_timestamp(test_hive_1171,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1171
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1175
+;
+
+drop view if exists test_hive_1176;
+
+create view test_hive_1176
+as
+select
+ test_hive_1166 as test_hive_1166
+ ,test_hive_1164 as test_hive_1164
+ ,test_hive_1167 as test_hive_1167
+ ,test_hive_276 as test_hive_276
+ ,test_hive_1165 as test_hive_1165
+ ,test_hive_1170 as test_hive_1170
+ ,test_hive_1169 as test_hive_1169
+ ,test_hive_1168 as test_hive_1168
+ ,test_hive_1171 as test_hive_1171
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1177 t1
+;
+
+drop view if exists test_hive_1173;
+
+create view test_hive_1173
+as
+select t1.*
+from test_hive_1176 t1
+inner join test_hive_1174 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_1186 purge;
+
+create table test_hive_1186
+(
+ test_hive_1180 string
+ ,test_hive_1178 string
+ ,test_hive_1181 string
+ ,test_hive_277 string
+ ,test_hive_1179 string
+ ,test_hive_1184 string
+ ,test_hive_1183 string
+ ,test_hive_1182 string
+ ,test_hive_1185 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_1189
+(
+ test_hive_1180 string
+ ,test_hive_1178 string
+ ,test_hive_1181 string
+ ,test_hive_277 string
+ ,test_hive_1179 string
+ ,test_hive_1184 string
+ ,test_hive_1183 string
+ ,test_hive_1182 string
+ ,test_hive_1185 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_1188 purge;
+
+create table if not exists test_hive_1188
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_1191;
+
+create view if not exists test_hive_1191
+as
+select
+ cast(test_hive_1180 as int) as test_hive_1180
+ ,cast(test_hive_1178 as int) as test_hive_1178
+ ,cast(test_hive_1181 as int) as test_hive_1181
+ ,cast(test_hive_277 as string) as test_hive_277
+ ,cast(test_hive_1179 as string) as test_hive_1179
+ ,cast(test_hive_1184 as string) as test_hive_1184
+ ,cast(test_hive_1183 as string) as test_hive_1183
+ ,cast(test_hive_1182 as string) as test_hive_1182
+ ,cast(from_unixtime(unix_timestamp(test_hive_1185,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1185
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1189
+;
+
+drop view if exists test_hive_1190;
+
+create view test_hive_1190
+as
+select
+ test_hive_1180 as test_hive_1180
+ ,test_hive_1178 as test_hive_1178
+ ,test_hive_1181 as test_hive_1181
+ ,test_hive_277 as test_hive_277
+ ,test_hive_1179 as test_hive_1179
+ ,test_hive_1184 as test_hive_1184
+ ,test_hive_1183 as test_hive_1183
+ ,test_hive_1182 as test_hive_1182
+ ,test_hive_1185 as test_hive_1185
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1191 t1
+;
+
+drop view if exists test_hive_1187;
+
+create view test_hive_1187
+as
+select t1.*
+from test_hive_1190 t1
+inner join test_hive_1188 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_1202 purge;
+
+create table test_hive_1202
+(
+ test_hive_1196 string
+ ,test_hive_1192 string
+ ,test_hive_1197 string
+ ,test_hive_278 string
+ ,test_hive_1195 string
+ ,test_hive_1194 string
+ ,test_hive_1193 string
+ ,test_hive_1200 string
+ ,test_hive_1199 string
+ ,test_hive_1198 string
+ ,test_hive_1201 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_1205
+(
+ test_hive_1196 string
+ ,test_hive_1192 string
+ ,test_hive_1197 string
+ ,test_hive_278 string
+ ,test_hive_1195 string
+ ,test_hive_1194 string
+ ,test_hive_1193 string
+ ,test_hive_1200 string
+ ,test_hive_1199 string
+ ,test_hive_1198 string
+ ,test_hive_1201 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_1204 purge;
+
+create table if not exists test_hive_1204
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_1207;
+
+create view if not exists test_hive_1207
+as
+select
+ cast(test_hive_1196 as int) as test_hive_1196
+ ,cast(test_hive_1192 as int) as test_hive_1192
+ ,cast(test_hive_1197 as int) as test_hive_1197
+ ,cast(test_hive_278 as decimal) as test_hive_278
+ ,cast(test_hive_1195 as string) as test_hive_1195
+ ,cast(from_unixtime(unix_timestamp(test_hive_1194,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1194
+ ,cast(from_unixtime(unix_timestamp(test_hive_1193,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1193
+ ,cast(test_hive_1200 as string) as test_hive_1200
+ ,cast(test_hive_1199 as string) as test_hive_1199
+ ,cast(test_hive_1198 as string) as test_hive_1198
+ ,cast(from_unixtime(unix_timestamp(test_hive_1201,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1201
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1205
+;
+
+drop view if exists test_hive_1206;
+
+create view test_hive_1206
+as
+select
+ test_hive_1196 as test_hive_1196
+ ,test_hive_1192 as test_hive_1192
+ ,test_hive_1197 as test_hive_1197
+ ,test_hive_278 as test_hive_278
+ ,test_hive_1195 as test_hive_1195
+ ,test_hive_1194 as test_hive_1194
+ ,test_hive_1193 as test_hive_1193
+ ,test_hive_1200 as test_hive_1200
+ ,test_hive_1199 as test_hive_1199
+ ,test_hive_1198 as test_hive_1198
+ ,test_hive_1201 as test_hive_1201
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1207 t1
+;
+
+drop view if exists test_hive_1203;
+
+create view test_hive_1203
+as
+select t1.*
+from test_hive_1206 t1
+inner join test_hive_1204 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_1312 purge;
+
+create table test_hive_1312
+(
+ test_hive_1307 string
+ ,test_hive_1305 string
+ ,test_hive_1308 string
+ ,test_hive_334 string
+ ,test_hive_1306 string
+ ,test_hive_1310 string
+ ,test_hive_1309 string
+ ,test_hive_1311 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_1315
+(
+ test_hive_1307 string
+ ,test_hive_1305 string
+ ,test_hive_1308 string
+ ,test_hive_334 string
+ ,test_hive_1306 string
+ ,test_hive_1310 string
+ ,test_hive_1309 string
+ ,test_hive_1311 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_1314 purge;
+
+create table if not exists test_hive_1314
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_1317;
+
+create view if not exists test_hive_1317
+as
+select
+ cast(test_hive_1307 as int) as test_hive_1307
+ ,cast(test_hive_1305 as int) as test_hive_1305
+ ,cast(test_hive_1308 as int) as test_hive_1308
+ ,cast(test_hive_334 as string) as test_hive_334
+ ,cast(test_hive_1306 as string) as test_hive_1306
+ ,cast(test_hive_1310 as string) as test_hive_1310
+ ,cast(test_hive_1309 as string) as test_hive_1309
+ ,cast(from_unixtime(unix_timestamp(test_hive_1311,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1311
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1315
+;
+
+drop view if exists test_hive_1316;
+
+create view test_hive_1316
+as
+select
+ test_hive_1307 as test_hive_1307
+ ,test_hive_1305 as test_hive_1305
+ ,test_hive_1308 as test_hive_1308
+ ,test_hive_334 as test_hive_334
+ ,test_hive_1306 as test_hive_1306
+ ,test_hive_1310 as test_hive_1310
+ ,test_hive_1309 as test_hive_1309
+ ,test_hive_1311 as test_hive_1311
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1317 t1
+;
+
+drop view if exists test_hive_1313;
+
+create view test_hive_1313
+as
+select t1.*
+from test_hive_1316 t1
+inner join test_hive_1314 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_1344 purge;
+
+create table test_hive_1344
+(
+ test_hive_1338 string
+ ,test_hive_1334 string
+ ,test_hive_1339 string
+ ,test_hive_336 string
+ ,test_hive_1337 string
+ ,test_hive_1336 string
+ ,test_hive_1335 string
+ ,test_hive_1342 string
+ ,test_hive_1341 string
+ ,test_hive_1340 string
+ ,test_hive_1343 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_1347
+(
+ test_hive_1338 string
+ ,test_hive_1334 string
+ ,test_hive_1339 string
+ ,test_hive_336 string
+ ,test_hive_1337 string
+ ,test_hive_1336 string
+ ,test_hive_1335 string
+ ,test_hive_1342 string
+ ,test_hive_1341 string
+ ,test_hive_1340 string
+ ,test_hive_1343 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_1346 purge;
+
+create table if not exists test_hive_1346
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_1349;
+
+create view if not exists test_hive_1349
+as
+select
+ cast(test_hive_1338 as int) as test_hive_1338
+ ,cast(test_hive_1334 as int) as test_hive_1334
+ ,cast(test_hive_1339 as int) as test_hive_1339
+ ,cast(test_hive_336 as string) as test_hive_336
+ ,cast(test_hive_1337 as string) as test_hive_1337
+ ,cast(from_unixtime(unix_timestamp(test_hive_1336,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1336
+ ,cast(from_unixtime(unix_timestamp(test_hive_1335,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1335
+ ,cast(test_hive_1342 as string) as test_hive_1342
+ ,cast(test_hive_1341 as string) as test_hive_1341
+ ,cast(test_hive_1340 as string) as test_hive_1340
+ ,cast(from_unixtime(unix_timestamp(test_hive_1343,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1343
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1347
+;
+
+drop view if exists test_hive_1348;
+
+create view test_hive_1348
+as
+select
+ test_hive_1338 as test_hive_1338
+ ,test_hive_1334 as test_hive_1334
+ ,test_hive_1339 as test_hive_1339
+ ,test_hive_336 as test_hive_336
+ ,test_hive_1337 as test_hive_1337
+ ,test_hive_1336 as test_hive_1336
+ ,test_hive_1335 as test_hive_1335
+ ,test_hive_1342 as test_hive_1342
+ ,test_hive_1341 as test_hive_1341
+ ,test_hive_1340 as test_hive_1340
+ ,test_hive_1343 as test_hive_1343
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1349 t1
+;
+
+drop view if exists test_hive_1345;
+
+create view test_hive_1345
+as
+select t1.*
+from test_hive_1348 t1
+inner join test_hive_1346 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_1360 purge;
+
+create table test_hive_1360
+(
+ test_hive_1354 string
+ ,test_hive_1350 string
+ ,test_hive_1355 string
+ ,test_hive_337 string
+ ,test_hive_1353 string
+ ,test_hive_1352 string
+ ,test_hive_1351 string
+ ,test_hive_1358 string
+ ,test_hive_1357 string
+ ,test_hive_1356 string
+ ,test_hive_1359 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_1363
+(
+ test_hive_1354 string
+ ,test_hive_1350 string
+ ,test_hive_1355 string
+ ,test_hive_337 string
+ ,test_hive_1353 string
+ ,test_hive_1352 string
+ ,test_hive_1351 string
+ ,test_hive_1358 string
+ ,test_hive_1357 string
+ ,test_hive_1356 string
+ ,test_hive_1359 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_1362 purge;
+
+create table if not exists test_hive_1362
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_1365;
+
+create view if not exists test_hive_1365
+as
+select
+ cast(test_hive_1354 as int) as test_hive_1354
+ ,cast(test_hive_1350 as int) as test_hive_1350
+ ,cast(test_hive_1355 as int) as test_hive_1355
+ ,cast(test_hive_337 as string) as test_hive_337
+ ,cast(test_hive_1353 as string) as test_hive_1353
+ ,cast(from_unixtime(unix_timestamp(test_hive_1352,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1352
+ ,cast(from_unixtime(unix_timestamp(test_hive_1351,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1351
+ ,cast(test_hive_1358 as string) as test_hive_1358
+ ,cast(test_hive_1357 as string) as test_hive_1357
+ ,cast(test_hive_1356 as string) as test_hive_1356
+ ,cast(from_unixtime(unix_timestamp(test_hive_1359,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1359
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1363
+;
+
+drop view if exists test_hive_1364;
+
+create view test_hive_1364
+as
+select
+ test_hive_1354 as test_hive_1354
+ ,test_hive_1350 as test_hive_1350
+ ,test_hive_1355 as test_hive_1355
+ ,test_hive_337 as test_hive_337
+ ,test_hive_1353 as test_hive_1353
+ ,test_hive_1352 as test_hive_1352
+ ,test_hive_1351 as test_hive_1351
+ ,test_hive_1358 as test_hive_1358
+ ,test_hive_1357 as test_hive_1357
+ ,test_hive_1356 as test_hive_1356
+ ,test_hive_1359 as test_hive_1359
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1365 t1
+;
+
+drop view if exists test_hive_1361;
+
+create view test_hive_1361
+as
+select t1.*
+from test_hive_1364 t1
+inner join test_hive_1362 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_1433 purge;
+
+create table test_hive_1433
+(
+ test_hive_1427 string
+ ,test_hive_1423 string
+ ,test_hive_1428 string
+ ,test_hive_413 string
+ ,test_hive_1426 string
+ ,test_hive_1425 string
+ ,test_hive_1424 string
+ ,test_hive_1431 string
+ ,test_hive_1430 string
+ ,test_hive_1429 string
+ ,test_hive_1432 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_1436
+(
+ test_hive_1427 string
+ ,test_hive_1423 string
+ ,test_hive_1428 string
+ ,test_hive_413 string
+ ,test_hive_1426 string
+ ,test_hive_1425 string
+ ,test_hive_1424 string
+ ,test_hive_1431 string
+ ,test_hive_1430 string
+ ,test_hive_1429 string
+ ,test_hive_1432 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_1435 purge;
+
+create table if not exists test_hive_1435
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_1438;
+
+create view if not exists test_hive_1438
+as
+select
+ cast(test_hive_1427 as int) as test_hive_1427
+ ,cast(test_hive_1423 as int) as test_hive_1423
+ ,cast(test_hive_1428 as int) as test_hive_1428
+ ,cast(test_hive_413 as decimal) as test_hive_413
+ ,cast(test_hive_1426 as string) as test_hive_1426
+ ,cast(from_unixtime(unix_timestamp(test_hive_1425,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1425
+ ,cast(from_unixtime(unix_timestamp(test_hive_1424,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1424
+ ,cast(test_hive_1431 as string) as test_hive_1431
+ ,cast(test_hive_1430 as string) as test_hive_1430
+ ,cast(test_hive_1429 as string) as test_hive_1429
+ ,cast(from_unixtime(unix_timestamp(test_hive_1432,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1432
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1436
+;
+
+drop view if exists test_hive_1437;
+
+create view test_hive_1437
+as
+select
+ test_hive_1427 as test_hive_1427
+ ,test_hive_1423 as test_hive_1423
+ ,test_hive_1428 as test_hive_1428
+ ,test_hive_413 as test_hive_413
+ ,test_hive_1426 as test_hive_1426
+ ,test_hive_1425 as test_hive_1425
+ ,test_hive_1424 as test_hive_1424
+ ,test_hive_1431 as test_hive_1431
+ ,test_hive_1430 as test_hive_1430
+ ,test_hive_1429 as test_hive_1429
+ ,test_hive_1432 as test_hive_1432
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1438 t1
+;
+
+drop view if exists test_hive_1434;
+
+create view test_hive_1434
+as
+select t1.*
+from test_hive_1437 t1
+inner join test_hive_1435 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_1447 purge;
+
+create table test_hive_1447
+(
+ test_hive_1441 string
+ ,test_hive_1439 string
+ ,test_hive_1442 string
+ ,test_hive_414 string
+ ,test_hive_1440 string
+ ,test_hive_1445 string
+ ,test_hive_1444 string
+ ,test_hive_1443 string
+ ,test_hive_1446 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_1450
+(
+ test_hive_1441 string
+ ,test_hive_1439 string
+ ,test_hive_1442 string
+ ,test_hive_414 string
+ ,test_hive_1440 string
+ ,test_hive_1445 string
+ ,test_hive_1444 string
+ ,test_hive_1443 string
+ ,test_hive_1446 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_1449 purge;
+
+create table if not exists test_hive_1449
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_1452;
+
+create view if not exists test_hive_1452
+as
+select
+ cast(test_hive_1441 as int) as test_hive_1441
+ ,cast(test_hive_1439 as int) as test_hive_1439
+ ,cast(test_hive_1442 as int) as test_hive_1442
+ ,cast(test_hive_414 as string) as test_hive_414
+ ,cast(test_hive_1440 as string) as test_hive_1440
+ ,cast(test_hive_1445 as string) as test_hive_1445
+ ,cast(test_hive_1444 as string) as test_hive_1444
+ ,cast(test_hive_1443 as string) as test_hive_1443
+ ,cast(from_unixtime(unix_timestamp(test_hive_1446,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1446
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1450
+;
+
+drop view if exists test_hive_1451;
+
+create view test_hive_1451
+as
+select
+ test_hive_1441 as test_hive_1441
+ ,test_hive_1439 as test_hive_1439
+ ,test_hive_1442 as test_hive_1442
+ ,test_hive_414 as test_hive_414
+ ,test_hive_1440 as test_hive_1440
+ ,test_hive_1445 as test_hive_1445
+ ,test_hive_1444 as test_hive_1444
+ ,test_hive_1443 as test_hive_1443
+ ,test_hive_1446 as test_hive_1446
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1452 t1
+;
+
+drop view if exists test_hive_1448;
+
+create view test_hive_1448
+as
+select t1.*
+from test_hive_1451 t1
+inner join test_hive_1449 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_1463 purge;
+
+create table test_hive_1463
+(
+ test_hive_1457 string
+ ,test_hive_1453 string
+ ,test_hive_1458 string
+ ,test_hive_415 string
+ ,test_hive_1456 string
+ ,test_hive_1455 string
+ ,test_hive_1454 string
+ ,test_hive_1461 string
+ ,test_hive_1460 string
+ ,test_hive_1459 string
+ ,test_hive_1462 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_1466
+(
+ test_hive_1457 string
+ ,test_hive_1453 string
+ ,test_hive_1458 string
+ ,test_hive_415 string
+ ,test_hive_1456 string
+ ,test_hive_1455 string
+ ,test_hive_1454 string
+ ,test_hive_1461 string
+ ,test_hive_1460 string
+ ,test_hive_1459 string
+ ,test_hive_1462 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_1465 purge;
+
+create table if not exists test_hive_1465
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_1468;
+
+create view if not exists test_hive_1468
+as
+select
+ cast(test_hive_1457 as int) as test_hive_1457
+ ,cast(test_hive_1453 as int) as test_hive_1453
+ ,cast(test_hive_1458 as int) as test_hive_1458
+ ,cast(test_hive_415 as decimal) as test_hive_415
+ ,cast(test_hive_1456 as string) as test_hive_1456
+ ,cast(from_unixtime(unix_timestamp(test_hive_1455,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1455
+ ,cast(from_unixtime(unix_timestamp(test_hive_1454,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1454
+ ,cast(test_hive_1461 as string) as test_hive_1461
+ ,cast(test_hive_1460 as string) as test_hive_1460
+ ,cast(test_hive_1459 as string) as test_hive_1459
+ ,cast(from_unixtime(unix_timestamp(test_hive_1462,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1462
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1466
+;
+
+drop view if exists test_hive_1467;
+
+create view test_hive_1467
+as
+select
+ test_hive_1457 as test_hive_1457
+ ,test_hive_1453 as test_hive_1453
+ ,test_hive_1458 as test_hive_1458
+ ,test_hive_415 as test_hive_415
+ ,test_hive_1456 as test_hive_1456
+ ,test_hive_1455 as test_hive_1455
+ ,test_hive_1454 as test_hive_1454
+ ,test_hive_1461 as test_hive_1461
+ ,test_hive_1460 as test_hive_1460
+ ,test_hive_1459 as test_hive_1459
+ ,test_hive_1462 as test_hive_1462
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1468 t1
+;
+
+drop view if exists test_hive_1464;
+
+create view test_hive_1464
+as
+select t1.*
+from test_hive_1467 t1
+inner join test_hive_1465 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_1477 purge;
+
+create table test_hive_1477
+(
+ test_hive_1471 string
+ ,test_hive_1469 string
+ ,test_hive_1472 string
+ ,test_hive_416 string
+ ,test_hive_1470 string
+ ,test_hive_1475 string
+ ,test_hive_1474 string
+ ,test_hive_1473 string
+ ,test_hive_1476 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_1480
+(
+ test_hive_1471 string
+ ,test_hive_1469 string
+ ,test_hive_1472 string
+ ,test_hive_416 string
+ ,test_hive_1470 string
+ ,test_hive_1475 string
+ ,test_hive_1474 string
+ ,test_hive_1473 string
+ ,test_hive_1476 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_1479 purge;
+
+create table if not exists test_hive_1479
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_1482;
+
+create view if not exists test_hive_1482
+as
+select
+ cast(test_hive_1471 as int) as test_hive_1471
+ ,cast(test_hive_1469 as int) as test_hive_1469
+ ,cast(test_hive_1472 as int) as test_hive_1472
+ ,cast(test_hive_416 as string) as test_hive_416
+ ,cast(test_hive_1470 as string) as test_hive_1470
+ ,cast(test_hive_1475 as string) as test_hive_1475
+ ,cast(test_hive_1474 as string) as test_hive_1474
+ ,cast(test_hive_1473 as string) as test_hive_1473
+ ,cast(from_unixtime(unix_timestamp(test_hive_1476,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1476
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1480
+;
+
+drop view if exists test_hive_1481;
+
+create view test_hive_1481
+as
+select
+ test_hive_1471 as test_hive_1471
+ ,test_hive_1469 as test_hive_1469
+ ,test_hive_1472 as test_hive_1472
+ ,test_hive_416 as test_hive_416
+ ,test_hive_1470 as test_hive_1470
+ ,test_hive_1475 as test_hive_1475
+ ,test_hive_1474 as test_hive_1474
+ ,test_hive_1473 as test_hive_1473
+ ,test_hive_1476 as test_hive_1476
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1482 t1
+;
+
+drop view if exists test_hive_1478;
+
+create view test_hive_1478
+as
+select t1.*
+from test_hive_1481 t1
+inner join test_hive_1479 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_1491 purge;
+
+create table test_hive_1491
+(
+ test_hive_1485 string
+ ,test_hive_1483 string
+ ,test_hive_1486 string
+ ,test_hive_417 string
+ ,test_hive_1484 string
+ ,test_hive_1489 string
+ ,test_hive_1488 string
+ ,test_hive_1487 string
+ ,test_hive_1490 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_1494
+(
+ test_hive_1485 string
+ ,test_hive_1483 string
+ ,test_hive_1486 string
+ ,test_hive_417 string
+ ,test_hive_1484 string
+ ,test_hive_1489 string
+ ,test_hive_1488 string
+ ,test_hive_1487 string
+ ,test_hive_1490 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_1493 purge;
+
+create table if not exists test_hive_1493
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_1496;
+
+create view if not exists test_hive_1496
+as
+select
+ cast(test_hive_1485 as int) as test_hive_1485
+ ,cast(test_hive_1483 as int) as test_hive_1483
+ ,cast(test_hive_1486 as int) as test_hive_1486
+ ,cast(test_hive_417 as string) as test_hive_417
+ ,cast(test_hive_1484 as string) as test_hive_1484
+ ,cast(test_hive_1489 as string) as test_hive_1489
+ ,cast(test_hive_1488 as string) as test_hive_1488
+ ,cast(test_hive_1487 as string) as test_hive_1487
+ ,cast(from_unixtime(unix_timestamp(test_hive_1490,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1490
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1494
+;
+
+drop view if exists test_hive_1495;
+
+create view test_hive_1495
+as
+select
+ test_hive_1485 as test_hive_1485
+ ,test_hive_1483 as test_hive_1483
+ ,test_hive_1486 as test_hive_1486
+ ,test_hive_417 as test_hive_417
+ ,test_hive_1484 as test_hive_1484
+ ,test_hive_1489 as test_hive_1489
+ ,test_hive_1488 as test_hive_1488
+ ,test_hive_1487 as test_hive_1487
+ ,test_hive_1490 as test_hive_1490
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1496 t1
+;
+
+drop view if exists test_hive_1492;
+
+create view test_hive_1492
+as
+select t1.*
+from test_hive_1495 t1
+inner join test_hive_1493 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_1507 purge;
+
+create table test_hive_1507
+(
+ test_hive_1501 string
+ ,test_hive_1497 string
+ ,test_hive_1502 string
+ ,test_hive_418 string
+ ,test_hive_1500 string
+ ,test_hive_1499 string
+ ,test_hive_1498 string
+ ,test_hive_1505 string
+ ,test_hive_1504 string
+ ,test_hive_1503 string
+ ,test_hive_1506 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_1510
+(
+ test_hive_1501 string
+ ,test_hive_1497 string
+ ,test_hive_1502 string
+ ,test_hive_418 string
+ ,test_hive_1500 string
+ ,test_hive_1499 string
+ ,test_hive_1498 string
+ ,test_hive_1505 string
+ ,test_hive_1504 string
+ ,test_hive_1503 string
+ ,test_hive_1506 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_1509 purge;
+
+create table if not exists test_hive_1509
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_1512;
+
+create view if not exists test_hive_1512
+as
+select
+ cast(test_hive_1501 as int) as test_hive_1501
+ ,cast(test_hive_1497 as int) as test_hive_1497
+ ,cast(test_hive_1502 as int) as test_hive_1502
+ ,cast(test_hive_418 as decimal) as test_hive_418
+ ,cast(test_hive_1500 as string) as test_hive_1500
+ ,cast(from_unixtime(unix_timestamp(test_hive_1499,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1499
+ ,cast(from_unixtime(unix_timestamp(test_hive_1498,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1498
+ ,cast(test_hive_1505 as string) as test_hive_1505
+ ,cast(test_hive_1504 as string) as test_hive_1504
+ ,cast(test_hive_1503 as string) as test_hive_1503
+ ,cast(from_unixtime(unix_timestamp(test_hive_1506,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1506
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1510
+;
+
+drop view if exists test_hive_1511;
+
+create view test_hive_1511
+as
+select
+ test_hive_1501 as test_hive_1501
+ ,test_hive_1497 as test_hive_1497
+ ,test_hive_1502 as test_hive_1502
+ ,test_hive_418 as test_hive_418
+ ,test_hive_1500 as test_hive_1500
+ ,test_hive_1499 as test_hive_1499
+ ,test_hive_1498 as test_hive_1498
+ ,test_hive_1505 as test_hive_1505
+ ,test_hive_1504 as test_hive_1504
+ ,test_hive_1503 as test_hive_1503
+ ,test_hive_1506 as test_hive_1506
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1512 t1
+;
+
+drop view if exists test_hive_1508;
+
+create view test_hive_1508
+as
+select t1.*
+from test_hive_1511 t1
+inner join test_hive_1509 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_1521 purge;
+
+create table test_hive_1521
+(
+ test_hive_1515 string
+ ,test_hive_1513 string
+ ,test_hive_1516 string
+ ,test_hive_419 string
+ ,test_hive_1514 string
+ ,test_hive_1519 string
+ ,test_hive_1518 string
+ ,test_hive_1517 string
+ ,test_hive_1520 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_1524
+(
+ test_hive_1515 string
+ ,test_hive_1513 string
+ ,test_hive_1516 string
+ ,test_hive_419 string
+ ,test_hive_1514 string
+ ,test_hive_1519 string
+ ,test_hive_1518 string
+ ,test_hive_1517 string
+ ,test_hive_1520 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_1523 purge;
+
+create table if not exists test_hive_1523
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_1526;
+
+create view if not exists test_hive_1526
+as
+select
+ cast(test_hive_1515 as int) as test_hive_1515
+ ,cast(test_hive_1513 as int) as test_hive_1513
+ ,cast(test_hive_1516 as int) as test_hive_1516
+ ,cast(test_hive_419 as string) as test_hive_419
+ ,cast(test_hive_1514 as string) as test_hive_1514
+ ,cast(test_hive_1519 as string) as test_hive_1519
+ ,cast(test_hive_1518 as string) as test_hive_1518
+ ,cast(test_hive_1517 as string) as test_hive_1517
+ ,cast(from_unixtime(unix_timestamp(test_hive_1520,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1520
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1524
+;
+
+drop view if exists test_hive_1525;
+
+create view test_hive_1525
+as
+select
+ test_hive_1515 as test_hive_1515
+ ,test_hive_1513 as test_hive_1513
+ ,test_hive_1516 as test_hive_1516
+ ,test_hive_419 as test_hive_419
+ ,test_hive_1514 as test_hive_1514
+ ,test_hive_1519 as test_hive_1519
+ ,test_hive_1518 as test_hive_1518
+ ,test_hive_1517 as test_hive_1517
+ ,test_hive_1520 as test_hive_1520
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1526 t1
+;
+
+drop view if exists test_hive_1522;
+
+create view test_hive_1522
+as
+select t1.*
+from test_hive_1525 t1
+inner join test_hive_1523 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_1537 purge;
+
+create table test_hive_1537
+(
+ test_hive_1531 string
+ ,test_hive_1527 string
+ ,test_hive_1532 string
+ ,test_hive_420 string
+ ,test_hive_1530 string
+ ,test_hive_1529 string
+ ,test_hive_1528 string
+ ,test_hive_1535 string
+ ,test_hive_1534 string
+ ,test_hive_1533 string
+ ,test_hive_1536 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_1540
+(
+ test_hive_1531 string
+ ,test_hive_1527 string
+ ,test_hive_1532 string
+ ,test_hive_420 string
+ ,test_hive_1530 string
+ ,test_hive_1529 string
+ ,test_hive_1528 string
+ ,test_hive_1535 string
+ ,test_hive_1534 string
+ ,test_hive_1533 string
+ ,test_hive_1536 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_1539 purge;
+
+create table if not exists test_hive_1539
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_1542;
+
+create view if not exists test_hive_1542
+as
+select
+ cast(test_hive_1531 as int) as test_hive_1531
+ ,cast(test_hive_1527 as int) as test_hive_1527
+ ,cast(test_hive_1532 as int) as test_hive_1532
+ ,cast(test_hive_420 as decimal) as test_hive_420
+ ,cast(test_hive_1530 as string) as test_hive_1530
+ ,cast(from_unixtime(unix_timestamp(test_hive_1529,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1529
+ ,cast(from_unixtime(unix_timestamp(test_hive_1528,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1528
+ ,cast(test_hive_1535 as string) as test_hive_1535
+ ,cast(test_hive_1534 as string) as test_hive_1534
+ ,cast(test_hive_1533 as string) as test_hive_1533
+ ,cast(from_unixtime(unix_timestamp(test_hive_1536,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1536
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1540
+;
+
+drop view if exists test_hive_1541;
+
+create view test_hive_1541
+as
+select
+ test_hive_1531 as test_hive_1531
+ ,test_hive_1527 as test_hive_1527
+ ,test_hive_1532 as test_hive_1532
+ ,test_hive_420 as test_hive_420
+ ,test_hive_1530 as test_hive_1530
+ ,test_hive_1529 as test_hive_1529
+ ,test_hive_1528 as test_hive_1528
+ ,test_hive_1535 as test_hive_1535
+ ,test_hive_1534 as test_hive_1534
+ ,test_hive_1533 as test_hive_1533
+ ,test_hive_1536 as test_hive_1536
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1542 t1
+;
+
+drop view if exists test_hive_1538;
+
+create view test_hive_1538
+as
+select t1.*
+from test_hive_1541 t1
+inner join test_hive_1539 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_1849 purge;
+
+create table test_hive_1849
+(
+ test_hive_1845 string
+ ,test_hive_1843 string
+ ,test_hive_1846 string
+ ,test_hive_445 string
+ ,test_hive_1844 string
+ ,test_hive_1847 string
+ ,test_hive_1848 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_1852
+(
+ test_hive_1845 string
+ ,test_hive_1843 string
+ ,test_hive_1846 string
+ ,test_hive_445 string
+ ,test_hive_1844 string
+ ,test_hive_1847 string
+ ,test_hive_1848 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_1851 purge;
+
+create table if not exists test_hive_1851
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_1854;
+
+create view if not exists test_hive_1854
+as
+select
+ cast(test_hive_1845 as int) as test_hive_1845
+ ,cast(test_hive_1843 as int) as test_hive_1843
+ ,cast(test_hive_1846 as int) as test_hive_1846
+ ,cast(test_hive_445 as decimal) as test_hive_445
+ ,cast(test_hive_1844 as string) as test_hive_1844
+ ,cast(test_hive_1847 as string) as test_hive_1847
+ ,cast(from_unixtime(unix_timestamp(test_hive_1848,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1848
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1852
+;
+
+drop view if exists test_hive_1853;
+
+create view test_hive_1853
+as
+select
+ test_hive_1845 as test_hive_1845
+ ,test_hive_1843 as test_hive_1843
+ ,test_hive_1846 as test_hive_1846
+ ,test_hive_445 as test_hive_445
+ ,test_hive_1844 as test_hive_1844
+ ,test_hive_1847 as test_hive_1847
+ ,test_hive_1848 as test_hive_1848
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1854 t1
+;
+
+drop view if exists test_hive_1850;
+
+create view test_hive_1850
+as
+select t1.*
+from test_hive_1853 t1
+inner join test_hive_1851 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_1861 purge;
+
+create table test_hive_1861
+(
+ test_hive_1857 string
+ ,test_hive_1855 string
+ ,test_hive_1858 string
+ ,test_hive_446 string
+ ,test_hive_1856 string
+ ,test_hive_1859 string
+ ,test_hive_1860 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_1864
+(
+ test_hive_1857 string
+ ,test_hive_1855 string
+ ,test_hive_1858 string
+ ,test_hive_446 string
+ ,test_hive_1856 string
+ ,test_hive_1859 string
+ ,test_hive_1860 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_1863 purge;
+
+create table if not exists test_hive_1863
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_1866;
+
+create view if not exists test_hive_1866
+as
+select
+ cast(test_hive_1857 as int) as test_hive_1857
+ ,cast(test_hive_1855 as int) as test_hive_1855
+ ,cast(test_hive_1858 as int) as test_hive_1858
+ ,cast(test_hive_446 as string) as test_hive_446
+ ,cast(test_hive_1856 as string) as test_hive_1856
+ ,cast(test_hive_1859 as string) as test_hive_1859
+ ,cast(from_unixtime(unix_timestamp(test_hive_1860,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1860
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1864
+;
+
+drop view if exists test_hive_1865;
+
+create view test_hive_1865
+as
+select
+ test_hive_1857 as test_hive_1857
+ ,test_hive_1855 as test_hive_1855
+ ,test_hive_1858 as test_hive_1858
+ ,test_hive_446 as test_hive_446
+ ,test_hive_1856 as test_hive_1856
+ ,test_hive_1859 as test_hive_1859
+ ,test_hive_1860 as test_hive_1860
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1866 t1
+;
+
+drop view if exists test_hive_1862;
+
+create view test_hive_1862
+as
+select t1.*
+from test_hive_1865 t1
+inner join test_hive_1863 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_1873 purge;
+
+create table test_hive_1873
+(
+ test_hive_1869 string
+ ,test_hive_1867 string
+ ,test_hive_1870 string
+ ,test_hive_447 string
+ ,test_hive_1868 string
+ ,test_hive_1871 string
+ ,test_hive_1872 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_1876
+(
+ test_hive_1869 string
+ ,test_hive_1867 string
+ ,test_hive_1870 string
+ ,test_hive_447 string
+ ,test_hive_1868 string
+ ,test_hive_1871 string
+ ,test_hive_1872 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_1875 purge;
+
+create table if not exists test_hive_1875
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_1878;
+
+create view if not exists test_hive_1878
+as
+select
+ cast(test_hive_1869 as int) as test_hive_1869
+ ,cast(test_hive_1867 as int) as test_hive_1867
+ ,cast(test_hive_1870 as int) as test_hive_1870
+ ,cast(test_hive_447 as string) as test_hive_447
+ ,cast(test_hive_1868 as string) as test_hive_1868
+ ,cast(test_hive_1871 as string) as test_hive_1871
+ ,cast(from_unixtime(unix_timestamp(test_hive_1872,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1872
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1876
+;
+
+drop view if exists test_hive_1877;
+
+create view test_hive_1877
+as
+select
+ test_hive_1869 as test_hive_1869
+ ,test_hive_1867 as test_hive_1867
+ ,test_hive_1870 as test_hive_1870
+ ,test_hive_447 as test_hive_447
+ ,test_hive_1868 as test_hive_1868
+ ,test_hive_1871 as test_hive_1871
+ ,test_hive_1872 as test_hive_1872
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1878 t1
+;
+
+drop view if exists test_hive_1874;
+
+create view test_hive_1874
+as
+select t1.*
+from test_hive_1877 t1
+inner join test_hive_1875 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_1299 purge;
+
+create table test_hive_1299
+(
+ test_hive_1288 string
+ ,test_hive_1287 string
+ ,test_hive_1289 string
+ ,test_hive_1282 string
+ ,test_hive_1285 string
+ ,test_hive_1283 string
+ ,test_hive_12832 string
+ ,test_hive_1286 string
+ ,test_hive_328 string
+ ,test_hive_316 string
+ ,test_hive_322 string
+ ,test_hive_327 string
+ ,test_hive_325 string
+ ,test_hive_313 string
+ ,test_hive_320 string
+ ,test_hive_318 string
+ ,test_hive_319 string
+ ,test_hive_331 string
+ ,test_hive_332 string
+ ,test_hive_333 string
+ ,test_hive_314 string
+ ,test_hive_321 string
+ ,test_hive_315 string
+ ,test_hive_324 string
+ ,test_hive_323 string
+ ,test_hive_326 string
+ ,test_hive_310 string
+ ,test_hive_311 string
+ ,test_hive_312 string
+ ,test_hive_317 string
+ ,test_hive_329 string
+ ,test_hive_330 string
+ ,test_hive_309 string
+ ,test_hive_1290 string
+ ,test_hive_1290_lag string
+ ,test_hive_1290_mil string
+ ,test_hive_1290_lag_mil string
+ ,test_hive_1290_bp string
+ ,test_hive_1290_bp_lag string
+ ,test_hive_1290_con string
+ ,test_hive_1290_con_lag string
+ ,test_hive_1298 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_1302
+(
+ test_hive_1288 string
+ ,test_hive_1287 string
+ ,test_hive_1289 string
+ ,test_hive_1282 string
+ ,test_hive_1285 string
+ ,test_hive_1283 string
+ ,test_hive_12832 string
+ ,test_hive_1286 string
+ ,test_hive_328 string
+ ,test_hive_316 string
+ ,test_hive_322 string
+ ,test_hive_327 string
+ ,test_hive_325 string
+ ,test_hive_313 string
+ ,test_hive_320 string
+ ,test_hive_318 string
+ ,test_hive_319 string
+ ,test_hive_331 string
+ ,test_hive_332 string
+ ,test_hive_333 string
+ ,test_hive_314 string
+ ,test_hive_321 string
+ ,test_hive_315 string
+ ,test_hive_324 string
+ ,test_hive_323 string
+ ,test_hive_326 string
+ ,test_hive_310 string
+ ,test_hive_311 string
+ ,test_hive_312 string
+ ,test_hive_317 string
+ ,test_hive_329 string
+ ,test_hive_330 string
+ ,test_hive_309 string
+ ,test_hive_1290 string
+ ,test_hive_1290_lag string
+ ,test_hive_1290_mil string
+ ,test_hive_1290_lag_mil string
+ ,test_hive_1290_bp string
+ ,test_hive_1290_bp_lag string
+ ,test_hive_1290_con string
+ ,test_hive_1290_con_lag string
+ ,test_hive_1298 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_1301 purge;
+
+create table if not exists test_hive_1301
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_1304;
+
+create view if not exists test_hive_1304
+as
+select
+ cast(test_hive_1288 as int) as test_hive_1288
+ ,cast(test_hive_1287 as int) as test_hive_1287
+ ,cast(test_hive_1289 as int) as test_hive_1289
+ ,cast(test_hive_1282 as string) as test_hive_1282
+ ,cast(test_hive_1285 as string) as test_hive_1285
+ ,cast(test_hive_1283 as string) as test_hive_1283
+ ,cast(test_hive_12832 as string) as test_hive_12832
+ ,cast(test_hive_1286 as string) as test_hive_1286
+ ,cast(test_hive_328 as string) as test_hive_328
+ ,cast(test_hive_316 as string) as test_hive_316
+ ,cast(test_hive_322 as string) as test_hive_322
+ ,cast(test_hive_327 as string) as test_hive_327
+ ,cast(test_hive_325 as string) as test_hive_325
+ ,cast(test_hive_313 as string) as test_hive_313
+ ,cast(test_hive_320 as string) as test_hive_320
+ ,cast(test_hive_318 as string) as test_hive_318
+ ,cast(test_hive_319 as string) as test_hive_319
+ ,cast(test_hive_331 as string) as test_hive_331
+ ,cast(test_hive_332 as string) as test_hive_332
+ ,cast(test_hive_333 as string) as test_hive_333
+ ,cast(test_hive_314 as string) as test_hive_314
+ ,cast(test_hive_321 as string) as test_hive_321
+ ,cast(test_hive_315 as string) as test_hive_315
+ ,cast(test_hive_324 as string) as test_hive_324
+ ,cast(test_hive_323 as string) as test_hive_323
+ ,cast(test_hive_326 as string) as test_hive_326
+ ,cast(test_hive_310 as string) as test_hive_310
+ ,cast(test_hive_311 as string) as test_hive_311
+ ,cast(test_hive_312 as string) as test_hive_312
+ ,cast(test_hive_317 as string) as test_hive_317
+ ,cast(test_hive_329 as string) as test_hive_329
+ ,cast(test_hive_330 as string) as test_hive_330
+ ,cast(test_hive_309 as string) as test_hive_309
+ ,cast(test_hive_1290 as double) as test_hive_1290
+ ,cast(test_hive_1290_lag as double) as test_hive_1290_lag
+ ,cast(test_hive_1290_mil as double) as test_hive_1290_mil
+ ,cast(test_hive_1290_lag_mil as double) as test_hive_1290_lag_mil
+ ,cast(test_hive_1290_bp as double) as test_hive_1290_bp
+ ,cast(test_hive_1290_bp_lag as double) as test_hive_1290_bp_lag
+ ,cast(test_hive_1290_con as double) as test_hive_1290_con
+ ,cast(test_hive_1290_con_lag as double) as test_hive_1290_con_lag
+ ,cast(from_unixtime(unix_timestamp(test_hive_1298,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1298
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1302
+;
+
+drop view if exists test_hive_1303;
+
+create view test_hive_1303
+as
+select
+ test_hive_1288 as test_hive_1288
+ ,test_hive_1287 as test_hive_1287
+ ,test_hive_1289 as test_hive_1289
+ ,test_hive_1282 as test_hive_1282
+ ,test_hive_1285 as test_hive_1285
+ ,test_hive_1283 as test_hive_1283
+ ,test_hive_12832 as test_hive_12832
+ ,test_hive_1286 as test_hive_1286
+ ,test_hive_328 as test_hive_328
+ ,test_hive_316 as test_hive_316
+ ,test_hive_322 as test_hive_322
+ ,test_hive_327 as test_hive_327
+ ,test_hive_325 as test_hive_325
+ ,test_hive_313 as test_hive_313
+ ,test_hive_320 as test_hive_320
+ ,test_hive_318 as test_hive_318
+ ,test_hive_319 as test_hive_319
+ ,test_hive_331 as test_hive_331
+ ,test_hive_332 as test_hive_332
+ ,test_hive_333 as test_hive_333
+ ,test_hive_314 as test_hive_314
+ ,test_hive_321 as test_hive_321
+ ,test_hive_315 as test_hive_315
+ ,test_hive_324 as test_hive_324
+ ,test_hive_323 as test_hive_323
+ ,test_hive_326 as test_hive_326
+ ,test_hive_310 as test_hive_310
+ ,test_hive_311 as test_hive_311
+ ,test_hive_312 as test_hive_312
+ ,test_hive_317 as test_hive_317
+ ,test_hive_329 as test_hive_329
+ ,test_hive_330 as test_hive_330
+ ,test_hive_309 as test_hive_309
+ ,test_hive_1290 as test_hive_1290
+ ,test_hive_1290_lag as test_hive_1290_lag
+ ,test_hive_1290_mil as test_hive_1290_mil
+ ,test_hive_1290_lag_mil as test_hive_1290_lag_mil
+ ,test_hive_1290_bp as test_hive_1290_bp
+ ,test_hive_1290_bp_lag as test_hive_1290_bp_lag
+ ,test_hive_1290_con as test_hive_1290_con
+ ,test_hive_1290_con_lag as test_hive_1290_con_lag
+ ,test_hive_1298 as test_hive_1298
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1304 t1
+;
+
+drop view if exists test_hive_1300;
+
+create view test_hive_1300
+as
+select t1.*
+from test_hive_1303 t1
+inner join test_hive_1301 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_2027 purge;
+
+create table test_hive_2027
+(
+ test_hive_2021 string
+ ,test_hive_2019 string
+ ,test_hive_2022 string
+ ,test_hive_458 string
+ ,test_hive_2020 string
+ ,test_hive_2025 string
+ ,test_hive_2024 string
+ ,test_hive_2023 string
+ ,test_hive_2026 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_2030
+(
+ test_hive_2021 string
+ ,test_hive_2019 string
+ ,test_hive_2022 string
+ ,test_hive_458 string
+ ,test_hive_2020 string
+ ,test_hive_2025 string
+ ,test_hive_2024 string
+ ,test_hive_2023 string
+ ,test_hive_2026 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_2029 purge;
+
+create table if not exists test_hive_2029
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_2032;
+
+create view if not exists test_hive_2032
+as
+select
+ cast(test_hive_2021 as int) as test_hive_2021
+ ,cast(test_hive_2019 as int) as test_hive_2019
+ ,cast(test_hive_2022 as int) as test_hive_2022
+ ,cast(test_hive_458 as string) as test_hive_458
+ ,cast(test_hive_2020 as string) as test_hive_2020
+ ,cast(test_hive_2025 as string) as test_hive_2025
+ ,cast(test_hive_2024 as string) as test_hive_2024
+ ,cast(test_hive_2023 as string) as test_hive_2023
+ ,cast(from_unixtime(unix_timestamp(test_hive_2026,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_2026
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_2030
+;
+
+drop view if exists test_hive_2031;
+
+create view test_hive_2031
+as
+select
+ test_hive_2021 as test_hive_2021
+ ,test_hive_2019 as test_hive_2019
+ ,test_hive_2022 as test_hive_2022
+ ,test_hive_458 as test_hive_458
+ ,test_hive_2020 as test_hive_2020
+ ,test_hive_2025 as test_hive_2025
+ ,test_hive_2024 as test_hive_2024
+ ,test_hive_2023 as test_hive_2023
+ ,test_hive_2026 as test_hive_2026
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_2032 t1
+;
+
+drop view if exists test_hive_2028;
+
+create view test_hive_2028
+as
+select t1.*
+from test_hive_2031 t1
+inner join test_hive_2029 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_2013 purge;
+
+create table test_hive_2013
+(
+ test_hive_2008 string
+ ,test_hive_2006 string
+ ,test_hive_2009 string
+ ,test_hive_457 string
+ ,test_hive_2007 string
+ ,test_hive_2011 string
+ ,test_hive_2010 string
+ ,test_hive_2012 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_2016
+(
+ test_hive_2008 string
+ ,test_hive_2006 string
+ ,test_hive_2009 string
+ ,test_hive_457 string
+ ,test_hive_2007 string
+ ,test_hive_2011 string
+ ,test_hive_2010 string
+ ,test_hive_2012 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_2015 purge;
+
+create table if not exists test_hive_2015
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_2018;
+
+create view if not exists test_hive_2018
+as
+select
+ cast(test_hive_2008 as int) as test_hive_2008
+ ,cast(test_hive_2006 as int) as test_hive_2006
+ ,cast(test_hive_2009 as int) as test_hive_2009
+ ,cast(test_hive_457 as string) as test_hive_457
+ ,cast(test_hive_2007 as string) as test_hive_2007
+ ,cast(test_hive_2011 as string) as test_hive_2011
+ ,cast(test_hive_2010 as string) as test_hive_2010
+ ,cast(from_unixtime(unix_timestamp(test_hive_2012,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_2012
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_2016
+;
+
+drop view if exists test_hive_2017;
+
+create view test_hive_2017
+as
+select
+ test_hive_2008 as test_hive_2008
+ ,test_hive_2006 as test_hive_2006
+ ,test_hive_2009 as test_hive_2009
+ ,test_hive_457 as test_hive_457
+ ,test_hive_2007 as test_hive_2007
+ ,test_hive_2011 as test_hive_2011
+ ,test_hive_2010 as test_hive_2010
+ ,test_hive_2012 as test_hive_2012
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_2018 t1
+;
+
+drop view if exists test_hive_2014;
+
+create view test_hive_2014
+as
+select t1.*
+from test_hive_2017 t1
+inner join test_hive_2015 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_2000 purge;
+
+create table test_hive_2000
+(
+ test_hive_1996 string
+ ,test_hive_1994 string
+ ,test_hive_1997 string
+ ,test_hive_456 string
+ ,test_hive_1995 string
+ ,test_hive_1998 string
+ ,test_hive_1999 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_2003
+(
+ test_hive_1996 string
+ ,test_hive_1994 string
+ ,test_hive_1997 string
+ ,test_hive_456 string
+ ,test_hive_1995 string
+ ,test_hive_1998 string
+ ,test_hive_1999 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_2002 purge;
+
+create table if not exists test_hive_2002
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_2005;
+
+create view if not exists test_hive_2005
+as
+select
+ cast(test_hive_1996 as int) as test_hive_1996
+ ,cast(test_hive_1994 as int) as test_hive_1994
+ ,cast(test_hive_1997 as int) as test_hive_1997
+ ,cast(test_hive_456 as string) as test_hive_456
+ ,cast(test_hive_1995 as string) as test_hive_1995
+ ,cast(test_hive_1998 as string) as test_hive_1998
+ ,cast(from_unixtime(unix_timestamp(test_hive_1999,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1999
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_2003
+;
+
+drop view if exists test_hive_2004;
+
+create view test_hive_2004
+as
+select
+ test_hive_1996 as test_hive_1996
+ ,test_hive_1994 as test_hive_1994
+ ,test_hive_1997 as test_hive_1997
+ ,test_hive_456 as test_hive_456
+ ,test_hive_1995 as test_hive_1995
+ ,test_hive_1998 as test_hive_1998
+ ,test_hive_1999 as test_hive_1999
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_2005 t1
+;
+
+drop view if exists test_hive_2001;
+
+create view test_hive_2001
+as
+select t1.*
+from test_hive_2004 t1
+inner join test_hive_2002 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_1988 purge;
+
+create table test_hive_1988
+(
+ test_hive_1984 string
+ ,test_hive_1982 string
+ ,test_hive_1985 string
+ ,test_hive_455 string
+ ,test_hive_1983 string
+ ,test_hive_1986 string
+ ,test_hive_1987 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_1991
+(
+ test_hive_1984 string
+ ,test_hive_1982 string
+ ,test_hive_1985 string
+ ,test_hive_455 string
+ ,test_hive_1983 string
+ ,test_hive_1986 string
+ ,test_hive_1987 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_1990 purge;
+
+create table if not exists test_hive_1990
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_1993;
+
+create view if not exists test_hive_1993
+as
+select
+ cast(test_hive_1984 as int) as test_hive_1984
+ ,cast(test_hive_1982 as int) as test_hive_1982
+ ,cast(test_hive_1985 as int) as test_hive_1985
+ ,cast(test_hive_455 as string) as test_hive_455
+ ,cast(test_hive_1983 as string) as test_hive_1983
+ ,cast(test_hive_1986 as string) as test_hive_1986
+ ,cast(from_unixtime(unix_timestamp(test_hive_1987,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1987
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1991
+;
+
+drop view if exists test_hive_1992;
+
+create view test_hive_1992
+as
+select
+ test_hive_1984 as test_hive_1984
+ ,test_hive_1982 as test_hive_1982
+ ,test_hive_1985 as test_hive_1985
+ ,test_hive_455 as test_hive_455
+ ,test_hive_1983 as test_hive_1983
+ ,test_hive_1986 as test_hive_1986
+ ,test_hive_1987 as test_hive_1987
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1993 t1
+;
+
+drop view if exists test_hive_1989;
+
+create view test_hive_1989
+as
+select t1.*
+from test_hive_1992 t1
+inner join test_hive_1990 t2 on
+t1.ds_ts = t2.max_partition;
+drop table if exists test_hive_1976 purge;
+
+create table test_hive_1976
+(
+ test_hive_1970 string
+ ,test_hive_1968 string
+ ,test_hive_1971 string
+ ,test_hive_454 string
+ ,test_hive_1969 string
+ ,test_hive_1974 string
+ ,test_hive_1973 string
+ ,test_hive_1972 string
+ ,test_hive_1975 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '');
+
+
+create table if not exists test_hive_1979
+(
+ test_hive_1970 string
+ ,test_hive_1968 string
+ ,test_hive_1971 string
+ ,test_hive_454 string
+ ,test_hive_1969 string
+ ,test_hive_1974 string
+ ,test_hive_1973 string
+ ,test_hive_1972 string
+ ,test_hive_1975 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet;
+
+drop table if exists test_hive_1978 purge;
+
+create table if not exists test_hive_1978
+(
+max_partition bigint
+);
+
+drop view if exists test_hive_1981;
+
+create view if not exists test_hive_1981
+as
+select
+ cast(test_hive_1970 as int) as test_hive_1970
+ ,cast(test_hive_1968 as int) as test_hive_1968
+ ,cast(test_hive_1971 as int) as test_hive_1971
+ ,cast(test_hive_454 as string) as test_hive_454
+ ,cast(test_hive_1969 as string) as test_hive_1969
+ ,cast(test_hive_1974 as string) as test_hive_1974
+ ,cast(test_hive_1973 as string) as test_hive_1973
+ ,cast(test_hive_1972 as string) as test_hive_1972
+ ,cast(from_unixtime(unix_timestamp(test_hive_1975,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1975
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1979
+;
+
+drop view if exists test_hive_1980;
+
+create view test_hive_1980
+as
+select
+ test_hive_1970 as test_hive_1970
+ ,test_hive_1968 as test_hive_1968
+ ,test_hive_1971 as test_hive_1971
+ ,test_hive_454 as test_hive_454
+ ,test_hive_1969 as test_hive_1969
+ ,test_hive_1974 as test_hive_1974
+ ,test_hive_1973 as test_hive_1973
+ ,test_hive_1972 as test_hive_1972
+ ,test_hive_1975 as test_hive_1975
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1981 t1
+;
+
+drop view if exists test_hive_1977;
+
+create view test_hive_1977
+as
+select t1.*
+from test_hive_1980 t1
+inner join test_hive_1978 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1962 purge; + +create table test_hive_1962 +( + test_hive_1958 string + ,test_hive_1956 string + ,test_hive_1959 string + ,test_hive_453 string + ,test_hive_1957 string + ,test_hive_1960 string + ,test_hive_1961 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1965 +( + test_hive_1958 string + ,test_hive_1956 string + ,test_hive_1959 string + ,test_hive_453 string + ,test_hive_1957 string + ,test_hive_1960 string + ,test_hive_1961 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1964 purge; + +create table if not exists test_hive_1964 +( +max_partition bigint +); + +drop view if exists test_hive_1967; + +create view if not exists test_hive_1967 +as +select + cast(test_hive_1958 as int) as test_hive_1958 + ,cast(test_hive_1956 as int) as test_hive_1956 + ,cast(test_hive_1959 as int) as test_hive_1959 + ,cast(test_hive_453 as string) as test_hive_453 + ,cast(test_hive_1957 as string) as test_hive_1957 + ,cast(test_hive_1960 as string) as test_hive_1960 + ,cast(from_unixtime(unix_timestamp(test_hive_1961,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1961 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1965 +; + +drop view if exists test_hive_1966; + +create view test_hive_1966 +as +select + test_hive_1958 as test_hive_1958 + ,test_hive_1956 as test_hive_1956 + ,test_hive_1959 as test_hive_1959 + ,test_hive_453 as test_hive_453 + ,test_hive_1957 as test_hive_1957 + ,test_hive_1960 as test_hive_1960 + ,test_hive_1961 as test_hive_1961 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1967 t1 +; + +drop view if exists test_hive_1963; + +create view test_hive_1963 +as +select t1.* +from test_hive_1966 t1 +inner join test_hive_1964 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1950 purge; + +create table test_hive_1950 +( + test_hive_1946 string + ,test_hive_1944 string + ,test_hive_1947 string + ,test_hive_452 string + ,test_hive_1945 string + ,test_hive_1948 string + ,test_hive_1949 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1953 +( + test_hive_1946 string + ,test_hive_1944 string + ,test_hive_1947 string + ,test_hive_452 string + ,test_hive_1945 string + ,test_hive_1948 string + ,test_hive_1949 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1952 purge; + +create table if not exists test_hive_1952 +( +max_partition bigint +); + +drop view if exists test_hive_1955; + +create view if not exists test_hive_1955 +as +select + cast(test_hive_1946 as int) as test_hive_1946 + ,cast(test_hive_1944 as int) as test_hive_1944 + ,cast(test_hive_1947 as int) as test_hive_1947 + ,cast(test_hive_452 as string) as test_hive_452 + ,cast(test_hive_1945 as string) as test_hive_1945 + ,cast(test_hive_1948 as string) as test_hive_1948 + ,cast(from_unixtime(unix_timestamp(test_hive_1949,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as 
test_hive_1949 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1953 +; + +drop view if exists test_hive_1954; + +create view test_hive_1954 +as +select + test_hive_1946 as test_hive_1946 + ,test_hive_1944 as test_hive_1944 + ,test_hive_1947 as test_hive_1947 + ,test_hive_452 as test_hive_452 + ,test_hive_1945 as test_hive_1945 + ,test_hive_1948 as test_hive_1948 + ,test_hive_1949 as test_hive_1949 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1955 t1 +; + +drop view if exists test_hive_1951; + +create view test_hive_1951 +as +select t1.* +from test_hive_1954 t1 +inner join test_hive_1952 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1938 purge; + +create table test_hive_1938 +( + test_hive_1933 string + ,test_hive_1931 string + ,test_hive_1934 string + ,test_hive_451 string + ,test_hive_1932 string + ,test_hive_1936 string + ,test_hive_1935 string + ,test_hive_1937 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1941 +( + test_hive_1933 string + ,test_hive_1931 string + ,test_hive_1934 string + ,test_hive_451 string + ,test_hive_1932 string + ,test_hive_1936 string + ,test_hive_1935 string + ,test_hive_1937 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1940 purge; + +create table if not exists test_hive_1940 +( +max_partition bigint +); + +drop view if exists test_hive_1943; + +create view if not exists test_hive_1943 +as +select + cast(test_hive_1933 as int) as test_hive_1933 + ,cast(test_hive_1931 as int) as test_hive_1931 + ,cast(test_hive_1934 as int) as test_hive_1934 + ,cast(test_hive_451 as string) as test_hive_451 + ,cast(test_hive_1932 as string) as test_hive_1932 + ,cast(test_hive_1936 as string) as test_hive_1936 + ,cast(test_hive_1935 as string) as test_hive_1935 + ,cast(from_unixtime(unix_timestamp(test_hive_1937,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1937 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1941 +; + +drop view if exists test_hive_1942; + +create view test_hive_1942 +as +select + test_hive_1933 as test_hive_1933 + ,test_hive_1931 as test_hive_1931 + ,test_hive_1934 as test_hive_1934 + ,test_hive_451 as test_hive_451 + ,test_hive_1932 as test_hive_1932 + ,test_hive_1936 as test_hive_1936 + ,test_hive_1935 as test_hive_1935 + ,test_hive_1937 as test_hive_1937 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1943 t1 +; + +drop view if exists test_hive_1939; + +create view test_hive_1939 +as +select t1.* +from test_hive_1942 t1 +inner join test_hive_1940 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1925 purge; + +create table test_hive_1925 +( + test_hive_1919 string + ,test_hive_1916 string + ,test_hive_1920 string + ,test_hive_1918 string + ,test_hive_1917 string + ,test_hive_1923 string + ,test_hive_1922 string + ,test_hive_1921 string + ,test_hive_1924 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1928 +( + test_hive_1919 string + ,test_hive_1916 
string + ,test_hive_1920 string + ,test_hive_1918 string + ,test_hive_1917 string + ,test_hive_1923 string + ,test_hive_1922 string + ,test_hive_1921 string + ,test_hive_1924 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1927 purge; + +create table if not exists test_hive_1927 +( +max_partition bigint +); + +drop view if exists test_hive_1930; + +create view if not exists test_hive_1930 +as +select + cast(test_hive_1919 as int) as test_hive_1919 + ,cast(test_hive_1916 as int) as test_hive_1916 + ,cast(test_hive_1920 as int) as test_hive_1920 + ,cast(test_hive_1918 as string) as test_hive_1918 + ,cast(test_hive_1917 as string) as test_hive_1917 + ,cast(test_hive_1923 as string) as test_hive_1923 + ,cast(test_hive_1922 as string) as test_hive_1922 + ,cast(test_hive_1921 as string) as test_hive_1921 + ,cast(from_unixtime(unix_timestamp(test_hive_1924,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1924 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1928 +; + +drop view if exists test_hive_1929; + +create view test_hive_1929 +as +select + test_hive_1919 as test_hive_1919 + ,test_hive_1916 as test_hive_1916 + ,test_hive_1920 as test_hive_1920 + ,test_hive_1918 as test_hive_1918 + ,test_hive_1917 as test_hive_1917 + ,test_hive_1923 as test_hive_1923 + ,test_hive_1922 as test_hive_1922 + ,test_hive_1921 as test_hive_1921 + ,test_hive_1924 as test_hive_1924 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1930 t1 +; + +drop view if exists test_hive_1926; + +create view test_hive_1926 +as +select t1.* +from test_hive_1929 t1 +inner join test_hive_1927 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1910 purge; + +create table test_hive_1910 +( + test_hive_1905 string + ,test_hive_1903 string + ,test_hive_1906 string + ,test_hive_450 string + ,test_hive_1904 string + ,test_hive_1908 string + ,test_hive_1907 string + ,test_hive_1909 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1913 +( + test_hive_1905 string + ,test_hive_1903 string + ,test_hive_1906 string + ,test_hive_450 string + ,test_hive_1904 string + ,test_hive_1908 string + ,test_hive_1907 string + ,test_hive_1909 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1912 purge; + +create table if not exists test_hive_1912 +( +max_partition bigint +); + +drop view if exists test_hive_1915; + +create view if not exists test_hive_1915 +as +select + cast(test_hive_1905 as int) as test_hive_1905 + ,cast(test_hive_1903 as int) as test_hive_1903 + ,cast(test_hive_1906 as int) as test_hive_1906 + ,cast(test_hive_450 as string) as test_hive_450 + ,cast(test_hive_1904 as string) as test_hive_1904 + ,cast(test_hive_1908 as string) as test_hive_1908 + ,cast(test_hive_1907 as string) as test_hive_1907 + ,cast(from_unixtime(unix_timestamp(test_hive_1909,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1909 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1913 +; + +drop view if exists test_hive_1914; 
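+-- [editor's note] The hunks above and below repeat one generated DDL cycle per feed.
+-- Distilled, with hypothetical placeholder names (raw_t, parquet_t, maxpart_t, cast_v,
+-- rename_v, latest_v are mine, not from the patch), each cycle is:
+--
+--   drop table if exists raw_t purge;
+--   create table raw_t (...)                       -- delimited staging table
+--     partitioned by (ds int, ts int)
+--     row format delimited fields terminated by '31'
+--     tblproperties('serialization.null.format' = '');
+--   create table if not exists parquet_t (..., source_file_name string,
+--     creation_date string, ds_ts bigint, ts int)  -- converted copy
+--     partitioned by (ds int) stored as parquet;
+--   drop table if exists maxpart_t purge;
+--   create table if not exists maxpart_t (max_partition bigint);
+--   create view if not exists cast_v as            -- typed view over parquet_t
+--     select cast(... as int) ..., cast(ds as bigint) as ds from parquet_t;
+--   create view rename_v as select ... from cast_v;
+--   create view latest_v as                        -- restrict to the newest load
+--     select t1.* from rename_v t1
+--     inner join maxpart_t t2 on t1.ds_ts = t2.max_partition;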
+ +create view test_hive_1914 +as +select + test_hive_1905 as test_hive_1905 + ,test_hive_1903 as test_hive_1903 + ,test_hive_1906 as test_hive_1906 + ,test_hive_450 as test_hive_450 + ,test_hive_1904 as test_hive_1904 + ,test_hive_1908 as test_hive_1908 + ,test_hive_1907 as test_hive_1907 + ,test_hive_1909 as test_hive_1909 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1915 t1 +; + +drop view if exists test_hive_1911; + +create view test_hive_1911 +as +select t1.* +from test_hive_1914 t1 +inner join test_hive_1912 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1897 purge; + +create table test_hive_1897 +( + test_hive_1893 string + ,test_hive_1891 string + ,test_hive_1894 string + ,test_hive_449 string + ,test_hive_1892 string + ,test_hive_1895 string + ,test_hive_1896 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1900 +( + test_hive_1893 string + ,test_hive_1891 string + ,test_hive_1894 string + ,test_hive_449 string + ,test_hive_1892 string + ,test_hive_1895 string + ,test_hive_1896 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1899 purge; + +create table if not exists test_hive_1899 +( +max_partition bigint +); + +drop view if exists test_hive_1902; + +create view if not exists test_hive_1902 +as +select + cast(test_hive_1893 as int) as test_hive_1893 + ,cast(test_hive_1891 as int) as test_hive_1891 + ,cast(test_hive_1894 as int) as test_hive_1894 + ,cast(test_hive_449 as string) as test_hive_449 + ,cast(test_hive_1892 as string) as test_hive_1892 + ,cast(test_hive_1895 as string) as test_hive_1895 + ,cast(from_unixtime(unix_timestamp(test_hive_1896,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1896 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1900 +; + +drop view if exists test_hive_1901; + +create view test_hive_1901 +as +select + test_hive_1893 as test_hive_1893 + ,test_hive_1891 as test_hive_1891 + ,test_hive_1894 as test_hive_1894 + ,test_hive_449 as test_hive_449 + ,test_hive_1892 as test_hive_1892 + ,test_hive_1895 as test_hive_1895 + ,test_hive_1896 as test_hive_1896 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1902 t1 +; + +drop view if exists test_hive_1898; + +create view test_hive_1898 +as +select t1.* +from test_hive_1901 t1 +inner join test_hive_1899 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1885 purge; + +create table test_hive_1885 +( + test_hive_1881 string + ,test_hive_1879 string + ,test_hive_1882 string + ,test_hive_448 string + ,test_hive_1880 string + ,test_hive_1883 string + ,test_hive_1884 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1888 +( + test_hive_1881 string + ,test_hive_1879 string + ,test_hive_1882 string + ,test_hive_448 string + ,test_hive_1880 string + ,test_hive_1883 string + ,test_hive_1884 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1887 purge; + +create table if not exists test_hive_1887 +( +max_partition 
bigint +); + +drop view if exists test_hive_1890; + +create view if not exists test_hive_1890 +as +select + cast(test_hive_1881 as int) as test_hive_1881 + ,cast(test_hive_1879 as int) as test_hive_1879 + ,cast(test_hive_1882 as int) as test_hive_1882 + ,cast(test_hive_448 as string) as test_hive_448 + ,cast(test_hive_1880 as string) as test_hive_1880 + ,cast(test_hive_1883 as string) as test_hive_1883 + ,cast(from_unixtime(unix_timestamp(test_hive_1884,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1884 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1888 +; + +drop view if exists test_hive_1889; + +create view test_hive_1889 +as +select + test_hive_1881 as test_hive_1881 + ,test_hive_1879 as test_hive_1879 + ,test_hive_1882 as test_hive_1882 + ,test_hive_448 as test_hive_448 + ,test_hive_1880 as test_hive_1880 + ,test_hive_1883 as test_hive_1883 + ,test_hive_1884 as test_hive_1884 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1890 t1 +; + +drop view if exists test_hive_1886; + +create view test_hive_1886 +as +select t1.* +from test_hive_1889 t1 +inner join test_hive_1887 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1837 purge; + +create table test_hive_1837 +( + test_hive_1832 string + ,test_hive_1830 string + ,test_hive_1833 string + ,test_hive_444 string + ,test_hive_1831 string + ,test_hive_1835 string + ,test_hive_1834 string + ,test_hive_1836 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1840 +( + test_hive_1832 string + ,test_hive_1830 string + ,test_hive_1833 string + ,test_hive_444 string + ,test_hive_1831 string + ,test_hive_1835 string + ,test_hive_1834 string + ,test_hive_1836 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1839 purge; + +create table if not exists test_hive_1839 +( +max_partition bigint +); + +drop view if exists test_hive_1842; + +create view if not exists test_hive_1842 +as +select + cast(test_hive_1832 as int) as test_hive_1832 + ,cast(test_hive_1830 as int) as test_hive_1830 + ,cast(test_hive_1833 as int) as test_hive_1833 + ,cast(test_hive_444 as string) as test_hive_444 + ,cast(test_hive_1831 as string) as test_hive_1831 + ,cast(test_hive_1835 as string) as test_hive_1835 + ,cast(test_hive_1834 as string) as test_hive_1834 + ,cast(from_unixtime(unix_timestamp(test_hive_1836,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1836 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1840 +; + +drop view if exists test_hive_1841; + +create view test_hive_1841 +as +select + test_hive_1832 as test_hive_1832 + ,test_hive_1830 as test_hive_1830 + ,test_hive_1833 as test_hive_1833 + ,test_hive_444 as test_hive_444 + ,test_hive_1831 as test_hive_1831 + ,test_hive_1835 as test_hive_1835 + ,test_hive_1834 as test_hive_1834 + ,test_hive_1836 as test_hive_1836 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1842 t1 +; + +drop view if exists test_hive_1838; + +create view test_hive_1838 +as +select t1.* +from test_hive_1841 t1 +inner join test_hive_1839 t2 on +t1.ds_ts = t2.max_partition; +drop 
table if exists test_hive_1824 purge; + +create table test_hive_1824 +( + test_hive_1818 string + ,test_hive_1816 string + ,test_hive_1819 string + ,test_hive_443 string + ,test_hive_1817 string + ,test_hive_1822 string + ,test_hive_1821 string + ,test_hive_1820 string + ,test_hive_1823 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1827 +( + test_hive_1818 string + ,test_hive_1816 string + ,test_hive_1819 string + ,test_hive_443 string + ,test_hive_1817 string + ,test_hive_1822 string + ,test_hive_1821 string + ,test_hive_1820 string + ,test_hive_1823 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1826 purge; + +create table if not exists test_hive_1826 +( +max_partition bigint +); + +drop view if exists test_hive_1829; + +create view if not exists test_hive_1829 +as +select + cast(test_hive_1818 as int) as test_hive_1818 + ,cast(test_hive_1816 as int) as test_hive_1816 + ,cast(test_hive_1819 as int) as test_hive_1819 + ,cast(test_hive_443 as string) as test_hive_443 + ,cast(test_hive_1817 as string) as test_hive_1817 + ,cast(test_hive_1822 as string) as test_hive_1822 + ,cast(test_hive_1821 as string) as test_hive_1821 + ,cast(test_hive_1820 as string) as test_hive_1820 + ,cast(from_unixtime(unix_timestamp(test_hive_1823,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1823 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1827 +; + +drop view if exists test_hive_1828; + +create view test_hive_1828 +as +select + test_hive_1818 as test_hive_1818 + ,test_hive_1816 as test_hive_1816 + ,test_hive_1819 as test_hive_1819 + ,test_hive_443 as test_hive_443 + ,test_hive_1817 as test_hive_1817 + ,test_hive_1822 as test_hive_1822 + ,test_hive_1821 as test_hive_1821 + ,test_hive_1820 as test_hive_1820 + ,test_hive_1823 as test_hive_1823 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1829 t1 +; + +drop view if exists test_hive_1825; + +create view test_hive_1825 +as +select t1.* +from test_hive_1828 t1 +inner join test_hive_1826 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1810 purge; + +create table test_hive_1810 +( + test_hive_1804 string + ,test_hive_1802 string + ,test_hive_1805 string + ,test_hive_442 string + ,test_hive_1803 string + ,test_hive_1808 string + ,test_hive_1807 string + ,test_hive_1806 string + ,test_hive_1809 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1813 +( + test_hive_1804 string + ,test_hive_1802 string + ,test_hive_1805 string + ,test_hive_442 string + ,test_hive_1803 string + ,test_hive_1808 string + ,test_hive_1807 string + ,test_hive_1806 string + ,test_hive_1809 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1812 purge; + +create table if not exists test_hive_1812 +( +max_partition bigint +); + +drop view if exists test_hive_1815; + +create view if not exists test_hive_1815 +as +select + cast(test_hive_1804 as int) as test_hive_1804 + ,cast(test_hive_1802 as int) as test_hive_1802 + 
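+-- [editor's note] On the recurring timestamp expression: unix_timestamp()/from_unixtime()
+-- take Java SimpleDateFormat patterns, which are case-sensitive ('MM' = month, 'mm' = minute,
+-- 'HH' = 24-hour, 'hh' = 12-hour). As written, 'yyyymmddhhmmss' reads the month digits as
+-- minutes and uses a 12-hour clock, so values such as 20240131235959 are misparsed. If the
+-- source values really are year-month-day-hour-minute-second, the corrected form would be
+-- (shown for test_hive_1823; the same fix applies to every occurrence in this script):
+--
+--   cast(from_unixtime(unix_timestamp(test_hive_1823, 'yyyyMMddHHmmss'),
+--                      'yyyy-MM-dd HH:mm:ss') as timestamp)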
,cast(test_hive_1805 as int) as test_hive_1805 + ,cast(test_hive_442 as string) as test_hive_442 + ,cast(test_hive_1803 as string) as test_hive_1803 + ,cast(test_hive_1808 as string) as test_hive_1808 + ,cast(test_hive_1807 as string) as test_hive_1807 + ,cast(test_hive_1806 as string) as test_hive_1806 + ,cast(from_unixtime(unix_timestamp(test_hive_1809,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1809 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1813 +; + +drop view if exists test_hive_1814; + +create view test_hive_1814 +as +select + test_hive_1804 as test_hive_1804 + ,test_hive_1802 as test_hive_1802 + ,test_hive_1805 as test_hive_1805 + ,test_hive_442 as test_hive_442 + ,test_hive_1803 as test_hive_1803 + ,test_hive_1808 as test_hive_1808 + ,test_hive_1807 as test_hive_1807 + ,test_hive_1806 as test_hive_1806 + ,test_hive_1809 as test_hive_1809 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1815 t1 +; + +drop view if exists test_hive_1811; + +create view test_hive_1811 +as +select t1.* +from test_hive_1814 t1 +inner join test_hive_1812 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1796 purge; + +create table test_hive_1796 +( + test_hive_1790 string + ,test_hive_1788 string + ,test_hive_1791 string + ,test_hive_441 string + ,test_hive_1789 string + ,test_hive_1794 string + ,test_hive_1793 string + ,test_hive_1792 string + ,test_hive_1795 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1799 +( + test_hive_1790 string + ,test_hive_1788 string + ,test_hive_1791 string + ,test_hive_441 string + ,test_hive_1789 string + ,test_hive_1794 string + ,test_hive_1793 string + ,test_hive_1792 string + ,test_hive_1795 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1798 purge; + +create table if not exists test_hive_1798 +( +max_partition bigint +); + +drop view if exists test_hive_1801; + +create view if not exists test_hive_1801 +as +select + cast(test_hive_1790 as int) as test_hive_1790 + ,cast(test_hive_1788 as int) as test_hive_1788 + ,cast(test_hive_1791 as int) as test_hive_1791 + ,cast(test_hive_441 as string) as test_hive_441 + ,cast(test_hive_1789 as string) as test_hive_1789 + ,cast(test_hive_1794 as string) as test_hive_1794 + ,cast(test_hive_1793 as string) as test_hive_1793 + ,cast(test_hive_1792 as string) as test_hive_1792 + ,cast(from_unixtime(unix_timestamp(test_hive_1795,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1795 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1799 +; + +drop view if exists test_hive_1800; + +create view test_hive_1800 +as +select + test_hive_1790 as test_hive_1790 + ,test_hive_1788 as test_hive_1788 + ,test_hive_1791 as test_hive_1791 + ,test_hive_441 as test_hive_441 + ,test_hive_1789 as test_hive_1789 + ,test_hive_1794 as test_hive_1794 + ,test_hive_1793 as test_hive_1793 + ,test_hive_1792 as test_hive_1792 + ,test_hive_1795 as test_hive_1795 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1801 t1 +; + +drop view if exists test_hive_1797; + +create view test_hive_1797 +as 
+select t1.* +from test_hive_1800 t1 +inner join test_hive_1798 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1782 purge; + +create table test_hive_1782 +( + test_hive_1776 string + ,test_hive_1774 string + ,test_hive_1777 string + ,test_hive_440 string + ,test_hive_1775 string + ,test_hive_1780 string + ,test_hive_1779 string + ,test_hive_1778 string + ,test_hive_1781 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1785 +( + test_hive_1776 string + ,test_hive_1774 string + ,test_hive_1777 string + ,test_hive_440 string + ,test_hive_1775 string + ,test_hive_1780 string + ,test_hive_1779 string + ,test_hive_1778 string + ,test_hive_1781 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1784 purge; + +create table if not exists test_hive_1784 +( +max_partition bigint +); + +drop view if exists test_hive_1787; + +create view if not exists test_hive_1787 +as +select + cast(test_hive_1776 as int) as test_hive_1776 + ,cast(test_hive_1774 as int) as test_hive_1774 + ,cast(test_hive_1777 as int) as test_hive_1777 + ,cast(test_hive_440 as string) as test_hive_440 + ,cast(test_hive_1775 as string) as test_hive_1775 + ,cast(test_hive_1780 as string) as test_hive_1780 + ,cast(test_hive_1779 as string) as test_hive_1779 + ,cast(test_hive_1778 as string) as test_hive_1778 + ,cast(from_unixtime(unix_timestamp(test_hive_1781,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1781 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1785 +; + +drop view if exists test_hive_1786; + +create view test_hive_1786 +as +select + test_hive_1776 as test_hive_1776 + ,test_hive_1774 as test_hive_1774 + ,test_hive_1777 as test_hive_1777 + ,test_hive_440 as test_hive_440 + ,test_hive_1775 as test_hive_1775 + ,test_hive_1780 as test_hive_1780 + ,test_hive_1779 as test_hive_1779 + ,test_hive_1778 as test_hive_1778 + ,test_hive_1781 as test_hive_1781 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1787 t1 +; + +drop view if exists test_hive_1783; + +create view test_hive_1783 +as +select t1.* +from test_hive_1786 t1 +inner join test_hive_1784 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1768 purge; + +create table test_hive_1768 +( + test_hive_1764 string + ,test_hive_1762 string + ,test_hive_1765 string + ,test_hive_438 string + ,test_hive_439 string + ,test_hive_1763 string + ,test_hive_1766 string + ,test_hive_1767 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1771 +( + test_hive_1764 string + ,test_hive_1762 string + ,test_hive_1765 string + ,test_hive_438 string + ,test_hive_439 string + ,test_hive_1763 string + ,test_hive_1766 string + ,test_hive_1767 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1770 purge; + +create table if not exists test_hive_1770 +( +max_partition bigint +); + +drop view if exists test_hive_1773; + +create view if not exists test_hive_1773 +as +select + cast(test_hive_1764 as int) as test_hive_1764 + 
,cast(test_hive_1762 as int) as test_hive_1762 + ,cast(test_hive_1765 as int) as test_hive_1765 + ,cast(test_hive_438 as string) as test_hive_438 + ,cast(test_hive_439 as string) as test_hive_439 + ,cast(test_hive_1763 as string) as test_hive_1763 + ,cast(test_hive_1766 as string) as test_hive_1766 + ,cast(from_unixtime(unix_timestamp(test_hive_1767,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1767 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1771 +; + +drop view if exists test_hive_1772; + +create view test_hive_1772 +as +select + test_hive_1764 as test_hive_1764 + ,test_hive_1762 as test_hive_1762 + ,test_hive_1765 as test_hive_1765 + ,test_hive_438 as test_hive_438 + ,test_hive_439 as test_hive_439 + ,test_hive_1763 as test_hive_1763 + ,test_hive_1766 as test_hive_1766 + ,test_hive_1767 as test_hive_1767 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1773 t1 +; + +drop view if exists test_hive_1769; + +create view test_hive_1769 +as +select t1.* +from test_hive_1772 t1 +inner join test_hive_1770 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1756 purge; + +create table test_hive_1756 +( + test_hive_1752 string + ,test_hive_1750 string + ,test_hive_1753 string + ,test_hive_437 string + ,test_hive_1751 string + ,test_hive_1754 string + ,test_hive_1755 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1759 +( + test_hive_1752 string + ,test_hive_1750 string + ,test_hive_1753 string + ,test_hive_437 string + ,test_hive_1751 string + ,test_hive_1754 string + ,test_hive_1755 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1758 purge; + +create table if not exists test_hive_1758 +( +max_partition bigint +); + +drop view if exists test_hive_1761; + +create view if not exists test_hive_1761 +as +select + cast(test_hive_1752 as int) as test_hive_1752 + ,cast(test_hive_1750 as int) as test_hive_1750 + ,cast(test_hive_1753 as int) as test_hive_1753 + ,cast(test_hive_437 as string) as test_hive_437 + ,cast(test_hive_1751 as string) as test_hive_1751 + ,cast(test_hive_1754 as string) as test_hive_1754 + ,cast(from_unixtime(unix_timestamp(test_hive_1755,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1755 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1759 +; + +drop view if exists test_hive_1760; + +create view test_hive_1760 +as +select + test_hive_1752 as test_hive_1752 + ,test_hive_1750 as test_hive_1750 + ,test_hive_1753 as test_hive_1753 + ,test_hive_437 as test_hive_437 + ,test_hive_1751 as test_hive_1751 + ,test_hive_1754 as test_hive_1754 + ,test_hive_1755 as test_hive_1755 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1761 t1 +; + +drop view if exists test_hive_1757; + +create view test_hive_1757 +as +select t1.* +from test_hive_1760 t1 +inner join test_hive_1758 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1744 purge; + +create table test_hive_1744 +( + test_hive_1740 string + ,test_hive_1738 string + ,test_hive_1741 string + ,test_hive_436 string + ,test_hive_1739 string + ,test_hive_1742 
string + ,test_hive_1743 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1747 +( + test_hive_1740 string + ,test_hive_1738 string + ,test_hive_1741 string + ,test_hive_436 string + ,test_hive_1739 string + ,test_hive_1742 string + ,test_hive_1743 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1746 purge; + +create table if not exists test_hive_1746 +( +max_partition bigint +); + +drop view if exists test_hive_1749; + +create view if not exists test_hive_1749 +as +select + cast(test_hive_1740 as int) as test_hive_1740 + ,cast(test_hive_1738 as int) as test_hive_1738 + ,cast(test_hive_1741 as int) as test_hive_1741 + ,cast(test_hive_436 as string) as test_hive_436 + ,cast(test_hive_1739 as string) as test_hive_1739 + ,cast(test_hive_1742 as string) as test_hive_1742 + ,cast(from_unixtime(unix_timestamp(test_hive_1743,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1743 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1747 +; + +drop view if exists test_hive_1748; + +create view test_hive_1748 +as +select + test_hive_1740 as test_hive_1740 + ,test_hive_1738 as test_hive_1738 + ,test_hive_1741 as test_hive_1741 + ,test_hive_436 as test_hive_436 + ,test_hive_1739 as test_hive_1739 + ,test_hive_1742 as test_hive_1742 + ,test_hive_1743 as test_hive_1743 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1749 t1 +; + +drop view if exists test_hive_1745; + +create view test_hive_1745 +as +select t1.* +from test_hive_1748 t1 +inner join test_hive_1746 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1732 purge; + +create table test_hive_1732 +( + test_hive_1728 string + ,test_hive_1726 string + ,test_hive_1729 string + ,test_hive_435 string + ,test_hive_1727 string + ,test_hive_1730 string + ,test_hive_1731 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1735 +( + test_hive_1728 string + ,test_hive_1726 string + ,test_hive_1729 string + ,test_hive_435 string + ,test_hive_1727 string + ,test_hive_1730 string + ,test_hive_1731 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1734 purge; + +create table if not exists test_hive_1734 +( +max_partition bigint +); + +drop view if exists test_hive_1737; + +create view if not exists test_hive_1737 +as +select + cast(test_hive_1728 as int) as test_hive_1728 + ,cast(test_hive_1726 as int) as test_hive_1726 + ,cast(test_hive_1729 as int) as test_hive_1729 + ,cast(test_hive_435 as string) as test_hive_435 + ,cast(test_hive_1727 as string) as test_hive_1727 + ,cast(test_hive_1730 as string) as test_hive_1730 + ,cast(from_unixtime(unix_timestamp(test_hive_1731,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1731 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1735 +; + +drop view if exists test_hive_1736; + +create view test_hive_1736 +as +select + test_hive_1728 as 
test_hive_1728 + ,test_hive_1726 as test_hive_1726 + ,test_hive_1729 as test_hive_1729 + ,test_hive_435 as test_hive_435 + ,test_hive_1727 as test_hive_1727 + ,test_hive_1730 as test_hive_1730 + ,test_hive_1731 as test_hive_1731 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1737 t1 +; + +drop view if exists test_hive_1733; + +create view test_hive_1733 +as +select t1.* +from test_hive_1736 t1 +inner join test_hive_1734 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1720 purge; + +create table test_hive_1720 +( + test_hive_1714 string + ,test_hive_1712 string + ,test_hive_1715 string + ,test_hive_434 string + ,test_hive_1713 string + ,test_hive_1718 string + ,test_hive_1717 string + ,test_hive_1716 string + ,test_hive_1719 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1723 +( + test_hive_1714 string + ,test_hive_1712 string + ,test_hive_1715 string + ,test_hive_434 string + ,test_hive_1713 string + ,test_hive_1718 string + ,test_hive_1717 string + ,test_hive_1716 string + ,test_hive_1719 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1722 purge; + +create table if not exists test_hive_1722 +( +max_partition bigint +); + +drop view if exists test_hive_1725; + +create view if not exists test_hive_1725 +as +select + cast(test_hive_1714 as int) as test_hive_1714 + ,cast(test_hive_1712 as int) as test_hive_1712 + ,cast(test_hive_1715 as int) as test_hive_1715 + ,cast(test_hive_434 as string) as test_hive_434 + ,cast(test_hive_1713 as string) as test_hive_1713 + ,cast(test_hive_1718 as string) as test_hive_1718 + ,cast(test_hive_1717 as string) as test_hive_1717 + ,cast(test_hive_1716 as string) as test_hive_1716 + ,cast(from_unixtime(unix_timestamp(test_hive_1719,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1719 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1723 +; + +drop view if exists test_hive_1724; + +create view test_hive_1724 +as +select + test_hive_1714 as test_hive_1714 + ,test_hive_1712 as test_hive_1712 + ,test_hive_1715 as test_hive_1715 + ,test_hive_434 as test_hive_434 + ,test_hive_1713 as test_hive_1713 + ,test_hive_1718 as test_hive_1718 + ,test_hive_1717 as test_hive_1717 + ,test_hive_1716 as test_hive_1716 + ,test_hive_1719 as test_hive_1719 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1725 t1 +; + +drop view if exists test_hive_1721; + +create view test_hive_1721 +as +select t1.* +from test_hive_1724 t1 +inner join test_hive_1722 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1706 purge; + +create table test_hive_1706 +( + test_hive_1700 string + ,test_hive_1698 string + ,test_hive_1701 string + ,test_hive_433 string + ,test_hive_1699 string + ,test_hive_1704 string + ,test_hive_1703 string + ,test_hive_1702 string + ,test_hive_1705 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1709 +( + test_hive_1700 string + ,test_hive_1698 string + ,test_hive_1701 string + ,test_hive_433 string + ,test_hive_1699 string + ,test_hive_1704 string + ,test_hive_1703 string + 
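+-- [editor's note] On "fields terminated by '31'": LazySimpleSerDe first tries to parse the
+-- delimiter string as a number (Byte.parseByte) and only falls back to the first character,
+-- so '31' denotes byte 31 (0x1F, the ASCII unit separator), not the literal character '3'.
+-- If that reading is right, the declaration is equivalent to the octal-escaped form:
+--
+--   row format delimited fields terminated by '\037'   -- 037 octal = 31 decimal = 0x1F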
,test_hive_1702 string + ,test_hive_1705 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1708 purge; + +create table if not exists test_hive_1708 +( +max_partition bigint +); + +drop view if exists test_hive_1711; + +create view if not exists test_hive_1711 +as +select + cast(test_hive_1700 as int) as test_hive_1700 + ,cast(test_hive_1698 as int) as test_hive_1698 + ,cast(test_hive_1701 as int) as test_hive_1701 + ,cast(test_hive_433 as string) as test_hive_433 + ,cast(test_hive_1699 as string) as test_hive_1699 + ,cast(test_hive_1704 as string) as test_hive_1704 + ,cast(test_hive_1703 as string) as test_hive_1703 + ,cast(test_hive_1702 as string) as test_hive_1702 + ,cast(from_unixtime(unix_timestamp(test_hive_1705,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1705 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1709 +; + +drop view if exists test_hive_1710; + +create view test_hive_1710 +as +select + test_hive_1700 as test_hive_1700 + ,test_hive_1698 as test_hive_1698 + ,test_hive_1701 as test_hive_1701 + ,test_hive_433 as test_hive_433 + ,test_hive_1699 as test_hive_1699 + ,test_hive_1704 as test_hive_1704 + ,test_hive_1703 as test_hive_1703 + ,test_hive_1702 as test_hive_1702 + ,test_hive_1705 as test_hive_1705 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1711 t1 +; + +drop view if exists test_hive_1707; + +create view test_hive_1707 +as +select t1.* +from test_hive_1710 t1 +inner join test_hive_1708 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1692 purge; + +create table test_hive_1692 +( + test_hive_1688 string + ,test_hive_1686 string + ,test_hive_1689 string + ,test_hive_432 string + ,test_hive_1687 string + ,test_hive_1690 string + ,test_hive_1691 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1695 +( + test_hive_1688 string + ,test_hive_1686 string + ,test_hive_1689 string + ,test_hive_432 string + ,test_hive_1687 string + ,test_hive_1690 string + ,test_hive_1691 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1694 purge; + +create table if not exists test_hive_1694 +( +max_partition bigint +); + +drop view if exists test_hive_1697; + +create view if not exists test_hive_1697 +as +select + cast(test_hive_1688 as int) as test_hive_1688 + ,cast(test_hive_1686 as int) as test_hive_1686 + ,cast(test_hive_1689 as int) as test_hive_1689 + ,cast(test_hive_432 as string) as test_hive_432 + ,cast(test_hive_1687 as string) as test_hive_1687 + ,cast(test_hive_1690 as string) as test_hive_1690 + ,cast(from_unixtime(unix_timestamp(test_hive_1691,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1691 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1695 +; + +drop view if exists test_hive_1696; + +create view test_hive_1696 +as +select + test_hive_1688 as test_hive_1688 + ,test_hive_1686 as test_hive_1686 + ,test_hive_1689 as test_hive_1689 + ,test_hive_432 as test_hive_432 + ,test_hive_1687 as test_hive_1687 + ,test_hive_1690 as 
test_hive_1690 + ,test_hive_1691 as test_hive_1691 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1697 t1 +; + +drop view if exists test_hive_1693; + +create view test_hive_1693 +as +select t1.* +from test_hive_1696 t1 +inner join test_hive_1694 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1680 purge; + +create table test_hive_1680 +( + test_hive_1676 string + ,test_hive_1674 string + ,test_hive_1677 string + ,test_hive_431 string + ,test_hive_1675 string + ,test_hive_1678 string + ,test_hive_1679 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1683 +( + test_hive_1676 string + ,test_hive_1674 string + ,test_hive_1677 string + ,test_hive_431 string + ,test_hive_1675 string + ,test_hive_1678 string + ,test_hive_1679 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1682 purge; + +create table if not exists test_hive_1682 +( +max_partition bigint +); + +drop view if exists test_hive_1685; + +create view if not exists test_hive_1685 +as +select + cast(test_hive_1676 as int) as test_hive_1676 + ,cast(test_hive_1674 as int) as test_hive_1674 + ,cast(test_hive_1677 as int) as test_hive_1677 + ,cast(test_hive_431 as string) as test_hive_431 + ,cast(test_hive_1675 as string) as test_hive_1675 + ,cast(test_hive_1678 as string) as test_hive_1678 + ,cast(from_unixtime(unix_timestamp(test_hive_1679,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1679 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1683 +; + +drop view if exists test_hive_1684; + +create view test_hive_1684 +as +select + test_hive_1676 as test_hive_1676 + ,test_hive_1674 as test_hive_1674 + ,test_hive_1677 as test_hive_1677 + ,test_hive_431 as test_hive_431 + ,test_hive_1675 as test_hive_1675 + ,test_hive_1678 as test_hive_1678 + ,test_hive_1679 as test_hive_1679 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1685 t1 +; + +drop view if exists test_hive_1681; + +create view test_hive_1681 +as +select t1.* +from test_hive_1684 t1 +inner join test_hive_1682 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1668 purge; + +create table test_hive_1668 +( + test_hive_1662 string + ,test_hive_1660 string + ,test_hive_1663 string + ,test_hive_430 string + ,test_hive_1661 string + ,test_hive_1666 string + ,test_hive_1665 string + ,test_hive_1664 string + ,test_hive_1667 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1671 +( + test_hive_1662 string + ,test_hive_1660 string + ,test_hive_1663 string + ,test_hive_430 string + ,test_hive_1661 string + ,test_hive_1666 string + ,test_hive_1665 string + ,test_hive_1664 string + ,test_hive_1667 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1670 purge; + +create table if not exists test_hive_1670 +( +max_partition bigint +); + +drop view if exists test_hive_1673; + +create view if not exists test_hive_1673 +as +select + cast(test_hive_1662 as int) as test_hive_1662 + ,cast(test_hive_1660 as 
int) as test_hive_1660 + ,cast(test_hive_1663 as int) as test_hive_1663 + ,cast(test_hive_430 as string) as test_hive_430 + ,cast(test_hive_1661 as string) as test_hive_1661 + ,cast(test_hive_1666 as string) as test_hive_1666 + ,cast(test_hive_1665 as string) as test_hive_1665 + ,cast(test_hive_1664 as string) as test_hive_1664 + ,cast(from_unixtime(unix_timestamp(test_hive_1667,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1667 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1671 +; + +drop view if exists test_hive_1672; + +create view test_hive_1672 +as +select + test_hive_1662 as test_hive_1662 + ,test_hive_1660 as test_hive_1660 + ,test_hive_1663 as test_hive_1663 + ,test_hive_430 as test_hive_430 + ,test_hive_1661 as test_hive_1661 + ,test_hive_1666 as test_hive_1666 + ,test_hive_1665 as test_hive_1665 + ,test_hive_1664 as test_hive_1664 + ,test_hive_1667 as test_hive_1667 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1673 t1 +; + +drop view if exists test_hive_1669; + +create view test_hive_1669 +as +select t1.* +from test_hive_1672 t1 +inner join test_hive_1670 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1654 purge; + +create table test_hive_1654 +( + test_hive_1650 string + ,test_hive_1648 string + ,test_hive_1651 string + ,test_hive_429 string + ,test_hive_1649 string + ,test_hive_1652 string + ,test_hive_1653 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1657 +( + test_hive_1650 string + ,test_hive_1648 string + ,test_hive_1651 string + ,test_hive_429 string + ,test_hive_1649 string + ,test_hive_1652 string + ,test_hive_1653 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1656 purge; + +create table if not exists test_hive_1656 +( +max_partition bigint +); + +drop view if exists test_hive_1659; + +create view if not exists test_hive_1659 +as +select + cast(test_hive_1650 as int) as test_hive_1650 + ,cast(test_hive_1648 as int) as test_hive_1648 + ,cast(test_hive_1651 as int) as test_hive_1651 + ,cast(test_hive_429 as string) as test_hive_429 + ,cast(test_hive_1649 as string) as test_hive_1649 + ,cast(test_hive_1652 as string) as test_hive_1652 + ,cast(from_unixtime(unix_timestamp(test_hive_1653,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1653 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1657 +; + +drop view if exists test_hive_1658; + +create view test_hive_1658 +as +select + test_hive_1650 as test_hive_1650 + ,test_hive_1648 as test_hive_1648 + ,test_hive_1651 as test_hive_1651 + ,test_hive_429 as test_hive_429 + ,test_hive_1649 as test_hive_1649 + ,test_hive_1652 as test_hive_1652 + ,test_hive_1653 as test_hive_1653 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1659 t1 +; + +drop view if exists test_hive_1655; + +create view test_hive_1655 +as +select t1.* +from test_hive_1658 t1 +inner join test_hive_1656 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1642 purge; + +create table test_hive_1642 +( + test_hive_1636 string + ,test_hive_1634 string + ,test_hive_1637 string 
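+-- [editor's note] Nothing in this hunk populates the max_partition tables (test_hive_1656
+-- here), so the latest-load views return no rows until some loader fills them. A loader step
+-- elsewhere presumably does something like the following; this is a guess for illustration,
+-- not part of the patch:
+--
+--   insert overwrite table test_hive_1656
+--   select max(ds_ts) from test_hive_1657;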
+ ,test_hive_428 string + ,test_hive_1635 string + ,test_hive_1640 string + ,test_hive_1639 string + ,test_hive_1638 string + ,test_hive_1641 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1645 +( + test_hive_1636 string + ,test_hive_1634 string + ,test_hive_1637 string + ,test_hive_428 string + ,test_hive_1635 string + ,test_hive_1640 string + ,test_hive_1639 string + ,test_hive_1638 string + ,test_hive_1641 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1644 purge; + +create table if not exists test_hive_1644 +( +max_partition bigint +); + +drop view if exists test_hive_1647; + +create view if not exists test_hive_1647 +as +select + cast(test_hive_1636 as int) as test_hive_1636 + ,cast(test_hive_1634 as int) as test_hive_1634 + ,cast(test_hive_1637 as int) as test_hive_1637 + ,cast(test_hive_428 as string) as test_hive_428 + ,cast(test_hive_1635 as string) as test_hive_1635 + ,cast(test_hive_1640 as string) as test_hive_1640 + ,cast(test_hive_1639 as string) as test_hive_1639 + ,cast(test_hive_1638 as string) as test_hive_1638 + ,cast(from_unixtime(unix_timestamp(test_hive_1641,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1641 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1645 +; + +drop view if exists test_hive_1646; + +create view test_hive_1646 +as +select + test_hive_1636 as test_hive_1636 + ,test_hive_1634 as test_hive_1634 + ,test_hive_1637 as test_hive_1637 + ,test_hive_428 as test_hive_428 + ,test_hive_1635 as test_hive_1635 + ,test_hive_1640 as test_hive_1640 + ,test_hive_1639 as test_hive_1639 + ,test_hive_1638 as test_hive_1638 + ,test_hive_1641 as test_hive_1641 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1647 t1 +; + +drop view if exists test_hive_1643; + +create view test_hive_1643 +as +select t1.* +from test_hive_1646 t1 +inner join test_hive_1644 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1628 purge; + +create table test_hive_1628 +( + test_hive_1622 string + ,test_hive_1620 string + ,test_hive_1623 string + ,test_hive_427 string + ,test_hive_1621 string + ,test_hive_1626 string + ,test_hive_1625 string + ,test_hive_1624 string + ,test_hive_1627 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1631 +( + test_hive_1622 string + ,test_hive_1620 string + ,test_hive_1623 string + ,test_hive_427 string + ,test_hive_1621 string + ,test_hive_1626 string + ,test_hive_1625 string + ,test_hive_1624 string + ,test_hive_1627 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1630 purge; + +create table if not exists test_hive_1630 +( +max_partition bigint +); + +drop view if exists test_hive_1633; + +create view if not exists test_hive_1633 +as +select + cast(test_hive_1622 as int) as test_hive_1622 + ,cast(test_hive_1620 as int) as test_hive_1620 + ,cast(test_hive_1623 as int) as test_hive_1623 + ,cast(test_hive_427 as string) as test_hive_427 + ,cast(test_hive_1621 as string) as test_hive_1621 + 
,cast(test_hive_1626 as string) as test_hive_1626 + ,cast(test_hive_1625 as string) as test_hive_1625 + ,cast(test_hive_1624 as string) as test_hive_1624 + ,cast(from_unixtime(unix_timestamp(test_hive_1627,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1627 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1631 +; + +drop view if exists test_hive_1632; + +create view test_hive_1632 +as +select + test_hive_1622 as test_hive_1622 + ,test_hive_1620 as test_hive_1620 + ,test_hive_1623 as test_hive_1623 + ,test_hive_427 as test_hive_427 + ,test_hive_1621 as test_hive_1621 + ,test_hive_1626 as test_hive_1626 + ,test_hive_1625 as test_hive_1625 + ,test_hive_1624 as test_hive_1624 + ,test_hive_1627 as test_hive_1627 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1633 t1 +; + +drop view if exists test_hive_1629; + +create view test_hive_1629 +as +select t1.* +from test_hive_1632 t1 +inner join test_hive_1630 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1614 purge; + +create table test_hive_1614 +( + test_hive_1608 string + ,test_hive_1606 string + ,test_hive_1609 string + ,test_hive_426 string + ,test_hive_1607 string + ,test_hive_1612 string + ,test_hive_1611 string + ,test_hive_1610 string + ,test_hive_1613 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1617 +( + test_hive_1608 string + ,test_hive_1606 string + ,test_hive_1609 string + ,test_hive_426 string + ,test_hive_1607 string + ,test_hive_1612 string + ,test_hive_1611 string + ,test_hive_1610 string + ,test_hive_1613 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1616 purge; + +create table if not exists test_hive_1616 +( +max_partition bigint +); + +drop view if exists test_hive_1619; + +create view if not exists test_hive_1619 +as +select + cast(test_hive_1608 as int) as test_hive_1608 + ,cast(test_hive_1606 as int) as test_hive_1606 + ,cast(test_hive_1609 as int) as test_hive_1609 + ,cast(test_hive_426 as string) as test_hive_426 + ,cast(test_hive_1607 as string) as test_hive_1607 + ,cast(test_hive_1612 as string) as test_hive_1612 + ,cast(test_hive_1611 as string) as test_hive_1611 + ,cast(test_hive_1610 as string) as test_hive_1610 + ,cast(from_unixtime(unix_timestamp(test_hive_1613,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1613 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1617 +; + +drop view if exists test_hive_1618; + +create view test_hive_1618 +as +select + test_hive_1608 as test_hive_1608 + ,test_hive_1606 as test_hive_1606 + ,test_hive_1609 as test_hive_1609 + ,test_hive_426 as test_hive_426 + ,test_hive_1607 as test_hive_1607 + ,test_hive_1612 as test_hive_1612 + ,test_hive_1611 as test_hive_1611 + ,test_hive_1610 as test_hive_1610 + ,test_hive_1613 as test_hive_1613 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1619 t1 +; + +drop view if exists test_hive_1615; + +create view test_hive_1615 +as +select t1.* +from test_hive_1618 t1 +inner join test_hive_1616 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1600 purge; + 
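+-- [editor's note] tblproperties('serialization.null.format' = '') makes the delimited staging
+-- tables (test_hive_1614 here) deserialize empty fields as SQL NULL instead of empty strings;
+-- those NULLs then flow through the parquet copies and the casting views unchanged. For example:
+--
+--   select test_hive_426 is null from test_hive_1614;   -- true for rows whose field was empty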
+create table test_hive_1600 +( + test_hive_1596 string + ,test_hive_1594 string + ,test_hive_1597 string + ,test_hive_425 string + ,test_hive_1595 string + ,test_hive_1598 string + ,test_hive_1599 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1603 +( + test_hive_1596 string + ,test_hive_1594 string + ,test_hive_1597 string + ,test_hive_425 string + ,test_hive_1595 string + ,test_hive_1598 string + ,test_hive_1599 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1602 purge; + +create table if not exists test_hive_1602 +( +max_partition bigint +); + +drop view if exists test_hive_1605; + +create view if not exists test_hive_1605 +as +select + cast(test_hive_1596 as int) as test_hive_1596 + ,cast(test_hive_1594 as int) as test_hive_1594 + ,cast(test_hive_1597 as int) as test_hive_1597 + ,cast(test_hive_425 as string) as test_hive_425 + ,cast(test_hive_1595 as string) as test_hive_1595 + ,cast(test_hive_1598 as string) as test_hive_1598 + ,cast(from_unixtime(unix_timestamp(test_hive_1599,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1599 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1603 +; + +drop view if exists test_hive_1604; + +create view test_hive_1604 +as +select + test_hive_1596 as test_hive_1596 + ,test_hive_1594 as test_hive_1594 + ,test_hive_1597 as test_hive_1597 + ,test_hive_425 as test_hive_425 + ,test_hive_1595 as test_hive_1595 + ,test_hive_1598 as test_hive_1598 + ,test_hive_1599 as test_hive_1599 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1605 t1 +; + +drop view if exists test_hive_1601; + +create view test_hive_1601 +as +select t1.* +from test_hive_1604 t1 +inner join test_hive_1602 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1588 purge; + +create table test_hive_1588 +( + test_hive_1584 string + ,test_hive_1582 string + ,test_hive_1585 string + ,test_hive_424 string + ,test_hive_1583 string + ,test_hive_1586 string + ,test_hive_1587 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1591 +( + test_hive_1584 string + ,test_hive_1582 string + ,test_hive_1585 string + ,test_hive_424 string + ,test_hive_1583 string + ,test_hive_1586 string + ,test_hive_1587 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1590 purge; + +create table if not exists test_hive_1590 +( +max_partition bigint +); + +drop view if exists test_hive_1593; + +create view if not exists test_hive_1593 +as +select + cast(test_hive_1584 as int) as test_hive_1584 + ,cast(test_hive_1582 as int) as test_hive_1582 + ,cast(test_hive_1585 as int) as test_hive_1585 + ,cast(test_hive_424 as string) as test_hive_424 + ,cast(test_hive_1583 as string) as test_hive_1583 + ,cast(test_hive_1586 as string) as test_hive_1586 + ,cast(from_unixtime(unix_timestamp(test_hive_1587,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1587 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as 
bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1591 +; + +drop view if exists test_hive_1592; + +create view test_hive_1592 +as +select + test_hive_1584 as test_hive_1584 + ,test_hive_1582 as test_hive_1582 + ,test_hive_1585 as test_hive_1585 + ,test_hive_424 as test_hive_424 + ,test_hive_1583 as test_hive_1583 + ,test_hive_1586 as test_hive_1586 + ,test_hive_1587 as test_hive_1587 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1593 t1 +; + +drop view if exists test_hive_1589; + +create view test_hive_1589 +as +select t1.* +from test_hive_1592 t1 +inner join test_hive_1590 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1576 purge; + +create table test_hive_1576 +( + test_hive_1570 string + ,test_hive_1568 string + ,test_hive_1571 string + ,test_hive_423 string + ,test_hive_1569 string + ,test_hive_1574 string + ,test_hive_1573 string + ,test_hive_1572 string + ,test_hive_1575 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1579 +( + test_hive_1570 string + ,test_hive_1568 string + ,test_hive_1571 string + ,test_hive_423 string + ,test_hive_1569 string + ,test_hive_1574 string + ,test_hive_1573 string + ,test_hive_1572 string + ,test_hive_1575 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1578 purge; + +create table if not exists test_hive_1578 +( +max_partition bigint +); + +drop view if exists test_hive_1581; + +create view if not exists test_hive_1581 +as +select + cast(test_hive_1570 as int) as test_hive_1570 + ,cast(test_hive_1568 as int) as test_hive_1568 + ,cast(test_hive_1571 as int) as test_hive_1571 + ,cast(test_hive_423 as string) as test_hive_423 + ,cast(test_hive_1569 as string) as test_hive_1569 + ,cast(test_hive_1574 as string) as test_hive_1574 + ,cast(test_hive_1573 as string) as test_hive_1573 + ,cast(test_hive_1572 as string) as test_hive_1572 + ,cast(from_unixtime(unix_timestamp(test_hive_1575,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1575 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1579 +; + +drop view if exists test_hive_1580; + +create view test_hive_1580 +as +select + test_hive_1570 as test_hive_1570 + ,test_hive_1568 as test_hive_1568 + ,test_hive_1571 as test_hive_1571 + ,test_hive_423 as test_hive_423 + ,test_hive_1569 as test_hive_1569 + ,test_hive_1574 as test_hive_1574 + ,test_hive_1573 as test_hive_1573 + ,test_hive_1572 as test_hive_1572 + ,test_hive_1575 as test_hive_1575 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1581 t1 +; + +drop view if exists test_hive_1577; + +create view test_hive_1577 +as +select t1.* +from test_hive_1580 t1 +inner join test_hive_1578 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1562 purge; + +create table test_hive_1562 +( + test_hive_1558 string + ,test_hive_1556 string + ,test_hive_1559 string + ,test_hive_422 string + ,test_hive_1557 string + ,test_hive_1560 string + ,test_hive_1561 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1565 +( + test_hive_1558 string + ,test_hive_1556 string + 
,test_hive_1559 string + ,test_hive_422 string + ,test_hive_1557 string + ,test_hive_1560 string + ,test_hive_1561 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1564 purge; + +create table if not exists test_hive_1564 +( +max_partition bigint +); + +drop view if exists test_hive_1567; + +create view if not exists test_hive_1567 +as +select + cast(test_hive_1558 as int) as test_hive_1558 + ,cast(test_hive_1556 as int) as test_hive_1556 + ,cast(test_hive_1559 as int) as test_hive_1559 + ,cast(test_hive_422 as string) as test_hive_422 + ,cast(test_hive_1557 as string) as test_hive_1557 + ,cast(test_hive_1560 as string) as test_hive_1560 + ,cast(from_unixtime(unix_timestamp(test_hive_1561,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1561 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1565 +; + +drop view if exists test_hive_1566; + +create view test_hive_1566 +as +select + test_hive_1558 as test_hive_1558 + ,test_hive_1556 as test_hive_1556 + ,test_hive_1559 as test_hive_1559 + ,test_hive_422 as test_hive_422 + ,test_hive_1557 as test_hive_1557 + ,test_hive_1560 as test_hive_1560 + ,test_hive_1561 as test_hive_1561 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1567 t1 +; + +drop view if exists test_hive_1563; + +create view test_hive_1563 +as +select t1.* +from test_hive_1566 t1 +inner join test_hive_1564 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1550 purge; + +create table test_hive_1550 +( + test_hive_1545 string + ,test_hive_1543 string + ,test_hive_1546 string + ,test_hive_421 string + ,test_hive_1544 string + ,test_hive_1548 string + ,test_hive_1547 string + ,test_hive_1549 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1553 +( + test_hive_1545 string + ,test_hive_1543 string + ,test_hive_1546 string + ,test_hive_421 string + ,test_hive_1544 string + ,test_hive_1548 string + ,test_hive_1547 string + ,test_hive_1549 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1552 purge; + +create table if not exists test_hive_1552 +( +max_partition bigint +); + +drop view if exists test_hive_1555; + +create view if not exists test_hive_1555 +as +select + cast(test_hive_1545 as int) as test_hive_1545 + ,cast(test_hive_1543 as int) as test_hive_1543 + ,cast(test_hive_1546 as int) as test_hive_1546 + ,cast(test_hive_421 as string) as test_hive_421 + ,cast(test_hive_1544 as string) as test_hive_1544 + ,cast(test_hive_1548 as string) as test_hive_1548 + ,cast(test_hive_1547 as string) as test_hive_1547 + ,cast(from_unixtime(unix_timestamp(test_hive_1549,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1549 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1553 +; + +drop view if exists test_hive_1554; + +create view test_hive_1554 +as +select + test_hive_1545 as test_hive_1545 + ,test_hive_1543 as test_hive_1543 + ,test_hive_1546 as test_hive_1546 + ,test_hive_421 as test_hive_421 + ,test_hive_1544 as test_hive_1544 + ,test_hive_1548 as 
test_hive_1548 + ,test_hive_1547 as test_hive_1547 + ,test_hive_1549 as test_hive_1549 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1555 t1 +; + +drop view if exists test_hive_1551; + +create view test_hive_1551 +as +select t1.* +from test_hive_1554 t1 +inner join test_hive_1552 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1328 purge; + +create table test_hive_1328 +( + test_hive_1322 string + ,test_hive_1318 string + ,test_hive_1323 string + ,test_hive_335 string + ,test_hive_1321 string + ,test_hive_1320 string + ,test_hive_1319 string + ,test_hive_1326 string + ,test_hive_1325 string + ,test_hive_1324 string + ,test_hive_1327 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1331 +( + test_hive_1322 string + ,test_hive_1318 string + ,test_hive_1323 string + ,test_hive_335 string + ,test_hive_1321 string + ,test_hive_1320 string + ,test_hive_1319 string + ,test_hive_1326 string + ,test_hive_1325 string + ,test_hive_1324 string + ,test_hive_1327 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1330 purge; + +create table if not exists test_hive_1330 +( +max_partition bigint +); + +drop view if exists test_hive_1333; + +create view if not exists test_hive_1333 +as +select + cast(test_hive_1322 as int) as test_hive_1322 + ,cast(test_hive_1318 as int) as test_hive_1318 + ,cast(test_hive_1323 as int) as test_hive_1323 + ,cast(test_hive_335 as string) as test_hive_335 + ,cast(test_hive_1321 as string) as test_hive_1321 + ,cast(from_unixtime(unix_timestamp(test_hive_1320 ,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1320 + ,cast(from_unixtime(unix_timestamp(test_hive_1319 ,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1319 + ,cast(test_hive_1326 as string) as test_hive_1326 + ,cast(test_hive_1325 as string) as test_hive_1325 + ,cast(test_hive_1324 as string) as test_hive_1324 + ,cast(from_unixtime(unix_timestamp(test_hive_1327,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1327 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1331 +; + +drop view if exists test_hive_1332; + +create view test_hive_1332 +as +select + test_hive_1322 as test_hive_1322 + ,test_hive_1318 as test_hive_1318 + ,test_hive_1323 as test_hive_1323 + ,test_hive_335 as test_hive_335 + ,test_hive_1321 as test_hive_1321 + ,test_hive_1320 as test_hive_1320 + ,test_hive_1319 as test_hive_1319 + ,test_hive_1326 as test_hive_1326 + ,test_hive_1325 as test_hive_1325 + ,test_hive_1324 as test_hive_1324 + ,test_hive_1327 as test_hive_1327 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1333 t1 +; + +drop view if exists test_hive_1329; + +create view test_hive_1329 +as +select t1.* +from test_hive_1332 t1 +inner join test_hive_1330 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1276 purge; + +create table test_hive_1276 +( + test_hive_1272 string + ,test_hive_1270 string + ,test_hive_1273 string + ,test_hive_308 string + ,test_hive_1271 string + ,test_hive_1274 string + ,test_hive_1275 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not 
exists test_hive_1279 +( + test_hive_1272 string + ,test_hive_1270 string + ,test_hive_1273 string + ,test_hive_308 string + ,test_hive_1271 string + ,test_hive_1274 string + ,test_hive_1275 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1278 purge; + +create table if not exists test_hive_1278 +( +max_partition bigint +); + +drop view if exists test_hive_1281; + +create view if not exists test_hive_1281 +as +select + cast(test_hive_1272 as int) as test_hive_1272 + ,cast(test_hive_1270 as int) as test_hive_1270 + ,cast(test_hive_1273 as int) as test_hive_1273 + ,cast(test_hive_308 as string) as test_hive_308 + ,cast(test_hive_1271 as string) as test_hive_1271 + ,cast(test_hive_1274 as string) as test_hive_1274 + ,cast(from_unixtime(unix_timestamp(test_hive_1275,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1275 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1279 +; + +drop view if exists test_hive_1280; + +create view test_hive_1280 +as +select + test_hive_1272 as test_hive_1272 + ,test_hive_1270 as test_hive_1270 + ,test_hive_1273 as test_hive_1273 + ,test_hive_308 as test_hive_308 + ,test_hive_1271 as test_hive_1271 + ,test_hive_1274 as test_hive_1274 + ,test_hive_1275 as test_hive_1275 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1281 t1 +; + +drop view if exists test_hive_1277; + +create view test_hive_1277 +as +select t1.* +from test_hive_1280 t1 +inner join test_hive_1278 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1264 purge; + +create table test_hive_1264 +( + test_hive_1258 string + ,test_hive_1256 string + ,test_hive_1259 string + ,test_hive_307 string + ,test_hive_306 string + ,test_hive_1257 string + ,test_hive_1262 string + ,test_hive_1261 string + ,test_hive_1260 string + ,test_hive_1263 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1267 +( + test_hive_1258 string + ,test_hive_1256 string + ,test_hive_1259 string + ,test_hive_307 string + ,test_hive_306 string + ,test_hive_1257 string + ,test_hive_1262 string + ,test_hive_1261 string + ,test_hive_1260 string + ,test_hive_1263 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1266 purge; + +create table if not exists test_hive_1266 +( +max_partition bigint +); + +drop view if exists test_hive_1269; + +create view if not exists test_hive_1269 +as +select + cast(test_hive_1258 as int) as test_hive_1258 + ,cast(test_hive_1256 as int) as test_hive_1256 + ,cast(test_hive_1259 as int) as test_hive_1259 + ,cast(test_hive_307 as string) as test_hive_307 + ,cast(test_hive_306 as string) as test_hive_306 + ,cast(test_hive_1257 as string) as test_hive_1257 + ,cast(test_hive_1262 as string) as test_hive_1262 + ,cast(test_hive_1261 as string) as test_hive_1261 + ,cast(test_hive_1260 as string) as test_hive_1260 + ,cast(from_unixtime(unix_timestamp(test_hive_1263,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1263 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1267 +; + 
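The generated blocks above and below all repeat one pattern: a delimited raw table partitioned by (ds, ts), a Parquet stage table partitioned by ds, a single-row max-partition table, a view that re-types the staged strings, and a view that exposes only the newest load. A minimal sketch, with hypothetical names (dim_raw, dim_stage, dim_max_part, dim_typed, dim_latest) standing in for the test_hive_* identifiers; the generated code also splits the typed view into a cast view plus a 1:1 alias view, which the sketch collapses into one:

create table dim_raw (id string, updated_at string)
partitioned by (ds int, ts int)
row format delimited fields terminated by '31'  -- numeric delimiter strings are read as byte codes, so '31' is the ASCII unit separator
tblproperties('serialization.null.format' = '');

create table dim_stage (id string, updated_at string, source_file_name string, creation_date string, ds_ts bigint, ts int)
partitioned by (ds int)
stored as parquet;

create table dim_max_part (max_partition bigint);  -- single row: the newest ds_ts loaded

create view dim_typed as  -- re-type the staged strings
select
  cast(id as int) as id
  ,cast(from_unixtime(unix_timestamp(updated_at, 'yyyyMMddHHmmss'), 'yyyy-MM-dd HH:mm:ss') as timestamp) as updated_at
  ,cast(creation_date as timestamp) as creation_date
  ,ds_ts
  ,cast(ds as bigint) as ds
  ,cast(ts as bigint) as ts
from dim_stage;

create view dim_latest as  -- expose only the newest load
select t1.*
from dim_typed t1
inner join dim_max_part t2 on t1.ds_ts = t2.max_partition;

Note the sketch writes the date masks as 'yyyyMMddHHmmss' and 'yyyy-MM-dd HH:mm:ss'; the generated views spell them in lower case, which SimpleDateFormat reads as minute ('mm') and 12-hour ('hh') fields, so the months and hours they parse are unlikely to be what was intended. The plans under test do not depend on the parsed values, so the patch is left as generated.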
+drop view if exists test_hive_1268; + +create view test_hive_1268 +as +select + test_hive_1258 as test_hive_1258 + ,test_hive_1256 as test_hive_1256 + ,test_hive_1259 as test_hive_1259 + ,test_hive_307 as test_hive_307 + ,test_hive_306 as test_hive_306 + ,test_hive_1257 as test_hive_1257 + ,test_hive_1262 as test_hive_1262 + ,test_hive_1261 as test_hive_1261 + ,test_hive_1260 as test_hive_1260 + ,test_hive_1263 as test_hive_1263 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1269 t1 +; + +drop view if exists test_hive_1265; + +create view test_hive_1265 +as +select t1.* +from test_hive_1268 t1 +inner join test_hive_1266 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1226 purge; + +create table test_hive_1226 +( + test_hive_1222 string + ,test_hive_1220 string + ,test_hive_1223 string + ,test_hive_280 string + ,test_hive_1221 string + ,test_hive_1224 string + ,test_hive_1225 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1229 +( + test_hive_1222 string + ,test_hive_1220 string + ,test_hive_1223 string + ,test_hive_280 string + ,test_hive_1221 string + ,test_hive_1224 string + ,test_hive_1225 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_1228 purge; + +create table if not exists test_hive_1228 +( +max_partition bigint +); + +drop view if exists test_hive_1231; + +create view if not exists test_hive_1231 +as +select + cast(test_hive_1222 as int) as test_hive_1222 + ,cast(test_hive_1220 as int) as test_hive_1220 + ,cast(test_hive_1223 as int) as test_hive_1223 + ,cast(test_hive_280 as string) as test_hive_280 + ,cast(test_hive_1221 as string) as test_hive_1221 + ,cast(test_hive_1224 as string) as test_hive_1224 + ,cast(from_unixtime(unix_timestamp(test_hive_1225,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1225 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1229 +; + +drop view if exists test_hive_1230; + +create view test_hive_1230 +as +select + test_hive_1222 as test_hive_1222 + ,test_hive_1220 as test_hive_1220 + ,test_hive_1223 as test_hive_1223 + ,test_hive_280 as test_hive_280 + ,test_hive_1221 as test_hive_1221 + ,test_hive_1224 as test_hive_1224 + ,test_hive_1225 as test_hive_1225 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1231 t1 +; + +drop view if exists test_hive_1227; + +create view test_hive_1227 +as +select t1.* +from test_hive_1230 t1 +inner join test_hive_1228 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_1214 purge; + +create table test_hive_1214 +( + test_hive_1210 string + ,test_hive_1208 string + ,test_hive_1211 string + ,test_hive_279 string + ,test_hive_1209 string + ,test_hive_1212 string + ,test_hive_1213 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_1217 +( + test_hive_1210 string + ,test_hive_1208 string + ,test_hive_1211 string + ,test_hive_279 string + ,test_hive_1209 string + ,test_hive_1212 string + ,test_hive_1213 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + 
+drop table if exists test_hive_1216 purge; + +create table if not exists test_hive_1216 +( +max_partition bigint +); + +drop view if exists test_hive_1219; + +create view if not exists test_hive_1219 +as +select + cast(test_hive_1210 as int) as test_hive_1210 + ,cast(test_hive_1208 as int) as test_hive_1208 + ,cast(test_hive_1211 as int) as test_hive_1211 + ,cast(test_hive_279 as string) as test_hive_279 + ,cast(test_hive_1209 as string) as test_hive_1209 + ,cast(test_hive_1212 as string) as test_hive_1212 + ,cast(from_unixtime(unix_timestamp(test_hive_1213,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1213 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1217 +; + +drop view if exists test_hive_1218; + +create view test_hive_1218 +as +select + test_hive_1210 as test_hive_1210 + ,test_hive_1208 as test_hive_1208 + ,test_hive_1211 as test_hive_1211 + ,test_hive_279 as test_hive_279 + ,test_hive_1209 as test_hive_1209 + ,test_hive_1212 as test_hive_1212 + ,test_hive_1213 as test_hive_1213 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1219 t1 +; + +drop view if exists test_hive_1215; + +create view test_hive_1215 +as +select t1.* +from test_hive_1218 t1 +inner join test_hive_1216 t2 on +t1.ds_ts = t2.max_partition; +drop table if exists test_hive_2046 purge; + +create table test_hive_2046 +( + test_hive_2043 string + ,test_hive_2034 string + ,test_hive_2044 string + ,test_hive_2033 string + ,test_hive_459 string + ,test_hive_460 string + ,test_hive_461 string + ,test_hive_462 string + ,test_hive_463 string + ,test_hive_464 string + ,test_hive_465 string + ,test_hive_2035 string + ,test_hive_2036 string + ,test_hive_2037 string + ,test_hive_2038 string + ,test_hive_2039 string + ,test_hive_2040 string + ,test_hive_2041 string + ,test_hive_2042 string + ,test_hive_467 string + ,test_hive_468 string + ,test_hive_469 string + ,test_hive_466 string + ,test_hive_2045 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = ''); + + +create table if not exists test_hive_2049 +( + test_hive_2043 string + ,test_hive_2034 string + ,test_hive_2044 string + ,test_hive_2033 string + ,test_hive_459 string + ,test_hive_460 string + ,test_hive_461 string + ,test_hive_462 string + ,test_hive_463 string + ,test_hive_464 string + ,test_hive_465 string + ,test_hive_2035 string + ,test_hive_2036 string + ,test_hive_2037 string + ,test_hive_2038 string + ,test_hive_2039 string + ,test_hive_2040 string + ,test_hive_2041 string + ,test_hive_2042 string + ,test_hive_467 string + ,test_hive_468 string + ,test_hive_469 string + ,test_hive_466 string + ,test_hive_2045 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet; + +drop table if exists test_hive_2048 purge; + +create table if not exists test_hive_2048 +( +max_partition bigint +); + +drop view if exists test_hive_2051; + +create view if not exists test_hive_2051 +as +select + cast(test_hive_2043 as int) as test_hive_2043 + ,cast(test_hive_2034 as int) as test_hive_2034 + ,cast(test_hive_2044 as int) as test_hive_2044 + ,cast(test_hive_2033 as string) as test_hive_2033 + ,cast(test_hive_459 as string) as test_hive_459 + ,cast(test_hive_460 as string) as test_hive_460 + ,cast(test_hive_461 as string) as test_hive_461 + ,cast(test_hive_462 as string) as 
test_hive_462 + ,cast(test_hive_463 as string) as test_hive_463 + ,cast(test_hive_464 as string) as test_hive_464 + ,cast(test_hive_465 as string) as test_hive_465 + ,cast(test_hive_2035 as int) as test_hive_2035 + ,cast(test_hive_2036 as int) as test_hive_2036 + ,cast(test_hive_2037 as int) as test_hive_2037 + ,cast(test_hive_2038 as int) as test_hive_2038 + ,cast(test_hive_2039 as int) as test_hive_2039 + ,cast(test_hive_2040 as int) as test_hive_2040 + ,cast(test_hive_2041 as int) as test_hive_2041 + ,cast(test_hive_2042 as int) as test_hive_2042 + ,cast(test_hive_467 as string) as test_hive_467 + ,cast(test_hive_468 as string) as test_hive_468 + ,cast(test_hive_469 as string) as test_hive_469 + ,cast(test_hive_466 as string) as test_hive_466 + ,cast(from_unixtime(unix_timestamp(test_hive_2045,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_2045 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_2049 +; + +drop view if exists test_hive_2050; + +create view test_hive_2050 +as +select + test_hive_2043 as test_hive_2043 + ,test_hive_2034 as test_hive_2034 + ,test_hive_2044 as test_hive_2044 + ,test_hive_2033 as test_hive_2033 + ,test_hive_459 as test_hive_459 + ,test_hive_460 as test_hive_460 + ,test_hive_461 as test_hive_461 + ,test_hive_462 as test_hive_462 + ,test_hive_463 as test_hive_463 + ,test_hive_464 as test_hive_464 + ,test_hive_465 as test_hive_465 + ,test_hive_2035 as test_hive_2035 + ,test_hive_2036 as test_hive_2036 + ,test_hive_2037 as test_hive_2037 + ,test_hive_2038 as test_hive_2038 + ,test_hive_2039 as test_hive_2039 + ,test_hive_2040 as test_hive_2040 + ,test_hive_2041 as test_hive_2041 + ,test_hive_2042 as test_hive_2042 + ,test_hive_467 as test_hive_467 + ,test_hive_468 as test_hive_468 + ,test_hive_469 as test_hive_469 + ,test_hive_466 as test_hive_466 + ,test_hive_2045 as test_hive_2045 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_2051 t1 +; + +drop view if exists test_hive_2047; + +create view test_hive_2047 +as +select t1.* +from test_hive_2050 t1 +inner join test_hive_2048 t2 on +t1.ds_ts = t2.max_partition; + +set hive.stageid.rearrange=execution; +set hive.auto.convert.join=true; +set hive.cbo.enable=false; + +explain select +t1.test_hive_1018, +t1.test_hive_1004, +t1.test_hive_1025, +t2.test_hive_1560, +t4.test_hive_1274, +t1.test_hive_29, +t7.test_hive_1948, +t1.test_hive_97, +t32.test_hive_1610, +t1.test_hive_98, +t34.test_hive_1972, +t35.test_hive_1792, +t41.test_hive_1224, +t43.test_hive_1895, +t44.test_hive_1907, +t45.test_hive_1935, +t46.test_hive_2010, +t47.test_hive_2023, +t1.test_hive_78, +t15.test_hive_1260, +t1.test_hive_79, +t1.test_hive_24, +t3.test_hive_1716, +t42.test_hive_1224, +t14.test_hive_1198, +t23.test_hive_1459, +t28.test_hive_1533, +t26.test_hive_1503, +t11.test_hive_1154, +t21.test_hive_1429, +t17.test_hive_1340, +t18.test_hive_1356, +t38.test_hive_1847, +t39.test_hive_1859, +t40.test_hive_1871, +t12.test_hive_1168, +t22.test_hive_1443, +t13.test_hive_1182, +t25.test_hive_1487, +t24.test_hive_1473, +t27.test_hive_1517, +t8.test_hive_1110, +t9.test_hive_1124, +t10.test_hive_1138, +t16.test_hive_1309, +t36.test_hive_1806, +t1.test_hive_104, +t1.test_hive_1002, +t1.test_hive_1003, +t1.test_hive_25, +t5.test_hive_1960, +t29.test_hive_1547, +t30.test_hive_1224, +t31.test_hive_1224, +t33.test_hive_1778, +t37.test_hive_1834, +t19.test_hive_1972, +t20.test_hive_1972, +t1.test_hive_100, 
+t1.test_hive_1023, +t1.test_hive_1024, +t1.test_hive_1010, +t1.test_hive_1010_a_d, +t1.test_hive_1010_a_g, +t1.test_hive_1026, +t1.test_hive_1000, +t1.test_hive_1001, +t1.test_hive_1030, +t1.test_hive_1030_1, +t1.test_hive_1030_2, +t1.test_hive_1030_3, +t1.test_hive_1021, +t1.test_hive_1020, +t1.test_hive_1022, +t1.test_hive_1019, +t1.test_hive_1027, +t1.test_hive_1028, +t1.test_hive_1029, +t1.test_hive_1005, +t1.test_hive_1005_a_d, +t1.test_hive_1005_psr, +t1.test_hive_1005_psr_a_d, +t1.test_hive_1005_psr_e, +t1.test_hive_1013, +t1.test_hive_1013_a_d, +t1.test_hive_1013_psr, +t1.test_hive_1013_psr_a_d, +t1.test_hive_1013_psr_e, +t1.test_hive_1034 +from test_hive_1036 t1 +join test_hive_1563 t2 on t1.test_hive_23 = t2.test_hive_422 +join test_hive_1721 t3 on t1.test_hive_26 = t3.test_hive_434 +join test_hive_1277 t4 on t1.test_hive_27 = t4.test_hive_308 +join test_hive_1963 t5 on t1.test_hive_28 = t5.test_hive_453 +join test_hive_1951 t7 on t1.test_hive_30 = t7.test_hive_452 +join test_hive_1115 t8 on t1.test_hive_71 = t8.test_hive_272 +join test_hive_1129 t9 on t1.test_hive_72 = t9.test_hive_273 +join test_hive_1143 t10 on t1.test_hive_73 = t10.test_hive_274 +join test_hive_1159 t11 on t1.test_hive_74 = t11.test_hive_275 +join test_hive_1173 t12 on t1.test_hive_75 = t12.test_hive_276 +join test_hive_1187 t13 on t1.test_hive_76 = t13.test_hive_277 +join test_hive_1203 t14 on t1.test_hive_77 = t14.test_hive_278 +join test_hive_1265 t15 on t1.test_hive_78 = t15.test_hive_306 +join test_hive_1313 t16 on t1.test_hive_80 = t16.test_hive_334 +join test_hive_1345 t17 on t1.test_hive_81 = t17.test_hive_336 +join test_hive_1361 t18 on t1.test_hive_82 = t18.test_hive_337 +join test_hive_1977 t19 on t1.test_hive_83 = t19.test_hive_454 +join test_hive_1977 t20 on t1.test_hive_84 = t20.test_hive_454 +join test_hive_1434 t21 on t1.test_hive_85 = t21.test_hive_413 +join test_hive_1448 t22 on t1.test_hive_86 = t22.test_hive_414 +join test_hive_1464 t23 on t1.test_hive_87 = t23.test_hive_415 +join test_hive_1478 t24 on t1.test_hive_88 = t24.test_hive_416 +join test_hive_1492 t25 on t1.test_hive_89 = t25.test_hive_417 +join test_hive_1508 t26 on t1.test_hive_90 = t26.test_hive_418 +join test_hive_1522 t27 on t1.test_hive_91 = t27.test_hive_419 +join test_hive_1538 t28 on t1.test_hive_92 = t28.test_hive_420 +join test_hive_1551 t29 on t1.test_hive_93 = t29.test_hive_421 +join test_hive_1227 t30 on t1.test_hive_94 = t30.test_hive_280 +join test_hive_1227 t31 on t1.test_hive_95 = t31.test_hive_280 +join test_hive_1615 t32 on t1.test_hive_96 = t32.test_hive_426 +join test_hive_1783 t33 on t1.test_hive_99 = t33.test_hive_440 +join test_hive_1977 t34 on t1.test_hive_101 = t34.test_hive_454 +join test_hive_1797 t35 on t1.test_hive_102 = t35.test_hive_441 +join test_hive_1811 t36 on t1.test_hive_103 = t36.test_hive_442 +join test_hive_1838 t37 on t1.test_hive_105 = t37.test_hive_444 +join test_hive_1850 t38 on t1.test_hive_106 = t38.test_hive_445 +join test_hive_1862 t39 on t1.test_hive_107 = t39.test_hive_446 +join test_hive_1874 t40 on t1.test_hive_108 = t40.test_hive_447 +join test_hive_1227 t41 on t1.test_hive_109 = t41.test_hive_280 +join test_hive_1227 t42 on t1.test_hive_110 = t42.test_hive_280 +join test_hive_1898 t43 on t1.test_hive_111 = t43.test_hive_449 +join test_hive_1911 t44 on t1.test_hive_112 = t44.test_hive_450 +join test_hive_1939 t45 on t1.test_hive_113 = t45.test_hive_451 +join test_hive_2014 t46 on t1.test_hive_114 = t46.test_hive_457 +join test_hive_2028 t47 on t1.test_hive_115 = 
t47.test_hive_458 +; diff --git ql/src/test/queries/clientpositive/mapjoin2.q ql/src/test/queries/clientpositive/mapjoin2.q index e194bd0177..014dabeea7 100644 --- ql/src/test/queries/clientpositive/mapjoin2.q +++ ql/src/test/queries/clientpositive/mapjoin2.q @@ -6,16 +6,30 @@ create table tbl_n1 (n bigint, t string); insert into tbl_n1 values (1, 'one'); insert into tbl_n1 values(2, 'two'); +explain +select a.n, a.t, isnull(b.n), isnull(b.t) from (select * from tbl_n1 where n = 1) a left outer join (select * from tbl_n1 where 1 = 2) b on a.n = b.n; select a.n, a.t, isnull(b.n), isnull(b.t) from (select * from tbl_n1 where n = 1) a left outer join (select * from tbl_n1 where 1 = 2) b on a.n = b.n; +explain +select isnull(a.n), isnull(a.t), b.n, b.t from (select * from tbl_n1 where 2 = 1) a right outer join (select * from tbl_n1 where n = 2) b on a.n = b.n; select isnull(a.n), isnull(a.t), b.n, b.t from (select * from tbl_n1 where 2 = 1) a right outer join (select * from tbl_n1 where n = 2) b on a.n = b.n; +explain +select isnull(a.n), isnull(a.t), isnull(b.n), isnull(b.t) from (select * from tbl_n1 where n = 1) a full outer join (select * from tbl_n1 where n = 2) b on a.n = b.n; select isnull(a.n), isnull(a.t), isnull(b.n), isnull(b.t) from (select * from tbl_n1 where n = 1) a full outer join (select * from tbl_n1 where n = 2) b on a.n = b.n; +explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key; select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key; +explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a left outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key; select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a left outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key; +explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key; select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key; +explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a full outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key; select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a full outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key; diff --git ql/src/test/queries/clientpositive/mapjoin46.q ql/src/test/queries/clientpositive/mapjoin46.q index 9de7113907..81f96107c9 100644 --- ql/src/test/queries/clientpositive/mapjoin46.q +++ ql/src/test/queries/clientpositive/mapjoin46.q @@ -3,6 +3,8 @@ set hive.auto.convert.join=true; set hive.strict.checks.cartesian.product=false; set hive.join.emit.interval=2; +-- SORT_QUERY_RESULTS + CREATE TABLE test1_n4 (key INT, value INT, col_1 STRING); INSERT INTO test1_n4 VALUES (NULL, NULL, 'None'), (98, NULL, 'None'), (99, 0, 'Alice'), (99, 2, 'Mat'), (100, 1, 'Bob'), (101, 2,
'Car'); @@ -173,6 +175,22 @@ ON (test1_n4.value=test2_n2.value OR test2_n2.key between 100 and 102)); -- Disjunction with pred on multiple inputs and single inputs (full outer join) +SET hive.mapjoin.full.outer=false; +EXPLAIN +SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + OR test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102); + +SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + OR test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102); + +SET hive.mapjoin.full.outer=true; +SET hive.merge.nway.joins=false; EXPLAIN SELECT * FROM test1_n4 FULL OUTER JOIN test2_n2 @@ -185,8 +203,23 @@ FROM test1_n4 FULL OUTER JOIN test2_n2 ON (test1_n4.value=test2_n2.value OR test1_n4.key between 100 and 102 OR test2_n2.key between 100 and 102); +SET hive.merge.nway.joins=true; -- Disjunction with pred on multiple inputs and left input (full outer join) +SET hive.mapjoin.full.outer=false; +EXPLAIN +SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + OR test1_n4.key between 100 and 102); + +SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + OR test1_n4.key between 100 and 102); + +SET hive.mapjoin.full.outer=true; +SET hive.merge.nway.joins=false; EXPLAIN SELECT * FROM test1_n4 FULL OUTER JOIN test2_n2 @@ -197,8 +230,23 @@ SELECT * FROM test1_n4 FULL OUTER JOIN test2_n2 ON (test1_n4.value=test2_n2.value OR test1_n4.key between 100 and 102); +SET hive.merge.nway.joins=true; -- Disjunction with pred on multiple inputs and right input (full outer join) +SET hive.mapjoin.full.outer=false; +EXPLAIN +SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + OR test2_n2.key between 100 and 102); + +SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + OR test2_n2.key between 100 and 102); + +SET hive.mapjoin.full.outer=true; +SET hive.merge.nway.joins=false; EXPLAIN SELECT * FROM test1_n4 FULL OUTER JOIN test2_n2 @@ -209,8 +257,25 @@ SELECT * FROM test1_n4 FULL OUTER JOIN test2_n2 ON (test1_n4.value=test2_n2.value OR test2_n2.key between 100 and 102); +SET hive.merge.nway.joins=true; -- Keys plus residual (full outer join) +SET hive.mapjoin.full.outer=false; +EXPLAIN +SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)); + +SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)); + +SET hive.mapjoin.full.outer=true; +SET hive.merge.nway.joins=false; EXPLAIN SELECT * FROM test1_n4 FULL OUTER JOIN test2_n2 @@ -223,8 +288,51 @@ FROM test1_n4 FULL OUTER JOIN test2_n2 ON (test1_n4.value=test2_n2.value AND (test1_n4.key between 100 and 102 OR test2_n2.key between 100 and 102)); +SET hive.merge.nway.joins=true; -- Mixed ( FOJ (ROJ, LOJ) ) +SET hive.mapjoin.full.outer=false; +EXPLAIN +SELECT * +FROM ( + SELECT test1_n4.key AS key1, test1_n4.value AS value1, test1_n4.col_1 AS col_1, + test2_n2.key AS key2, test2_n2.value AS value2, test2_n2.col_2 AS col_2 + FROM test1_n4 RIGHT OUTER JOIN test2_n2 + ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)) + ) sq1 +FULL OUTER JOIN ( + SELECT test1_n4.key AS key3, test1_n4.value AS value3, test1_n4.col_1 AS col_3, + test2_n2.key AS key4, test2_n2.value AS value4, 
test2_n2.col_2 AS col_4 + FROM test1_n4 LEFT OUTER JOIN test2_n2 + ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)) + ) sq2 +ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2); + +SELECT * +FROM ( + SELECT test1_n4.key AS key1, test1_n4.value AS value1, test1_n4.col_1 AS col_1, + test2_n2.key AS key2, test2_n2.value AS value2, test2_n2.col_2 AS col_2 + FROM test1_n4 RIGHT OUTER JOIN test2_n2 + ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)) + ) sq1 +FULL OUTER JOIN ( + SELECT test1_n4.key AS key3, test1_n4.value AS value3, test1_n4.col_1 AS col_3, + test2_n2.key AS key4, test2_n2.value AS value4, test2_n2.col_2 AS col_4 + FROM test1_n4 LEFT OUTER JOIN test2_n2 + ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)) + ) sq2 +ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2); + +SET hive.mapjoin.full.outer=true; +SET hive.merge.nway.joins=false; EXPLAIN SELECT * FROM ( @@ -263,3 +371,4 @@ FULL OUTER JOIN ( OR test2_n2.key between 100 and 102)) ) sq2 ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2); +SET hive.merge.nway.joins=true; \ No newline at end of file diff --git ql/src/test/queries/clientpositive/parquet_vectorization_0.q ql/src/test/queries/clientpositive/parquet_vectorization_0.q index fa8ec8633c..55816d8602 100644 --- ql/src/test/queries/clientpositive/parquet_vectorization_0.q +++ ql/src/test/queries/clientpositive/parquet_vectorization_0.q @@ -10,7 +10,7 @@ set hive.fetch.task.conversion=none; -- SORT_QUERY_RESULTS -- Use ORDER BY clauses to generate 2 stages. -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT MIN(ctinyint) as c1, MAX(ctinyint), COUNT(ctinyint), @@ -25,7 +25,7 @@ SELECT MIN(ctinyint) as c1, FROM alltypesparquet ORDER BY c1; -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(ctinyint) as c1 FROM alltypesparquet ORDER BY c1; @@ -59,7 +59,7 @@ SELECT FROM alltypesparquet ORDER BY c1; -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT MIN(cbigint) as c1, MAX(cbigint), COUNT(cbigint), @@ -74,7 +74,7 @@ SELECT MIN(cbigint) as c1, FROM alltypesparquet ORDER BY c1; -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(cbigint) as c1 FROM alltypesparquet ORDER BY c1; @@ -108,7 +108,7 @@ SELECT FROM alltypesparquet ORDER BY c1; -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT MIN(cfloat) as c1, MAX(cfloat), COUNT(cfloat), @@ -123,7 +123,7 @@ SELECT MIN(cfloat) as c1, FROM alltypesparquet ORDER BY c1; -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(cfloat) as c1 FROM alltypesparquet ORDER BY c1; @@ -157,7 +157,7 @@ SELECT FROM alltypesparquet ORDER BY c1; -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT AVG(cbigint), (-(AVG(cbigint))), (-6432 + AVG(cbigint)), diff --git ql/src/test/queries/clientpositive/parquet_vectorization_1.q ql/src/test/queries/clientpositive/parquet_vectorization_1.q index 311334da11..33c5698b38 100644 --- ql/src/test/queries/clientpositive/parquet_vectorization_1.q +++ ql/src/test/queries/clientpositive/parquet_vectorization_1.q @@ -5,7 +5,7 @@ set hive.vectorized.execution.reduce.enabled=true; -- SORT_QUERY_RESULTS -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT VAR_POP(ctinyint), (VAR_POP(ctinyint) / -26.28), 
SUM(cfloat), diff --git ql/src/test/queries/clientpositive/parquet_vectorization_10.q ql/src/test/queries/clientpositive/parquet_vectorization_10.q index c560515a5f..1329a476e8 100644 --- ql/src/test/queries/clientpositive/parquet_vectorization_10.q +++ ql/src/test/queries/clientpositive/parquet_vectorization_10.q @@ -4,7 +4,7 @@ set hive.fetch.task.conversion=none; -- SORT_QUERY_RESULTS -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT cdouble, ctimestamp1, ctinyint, diff --git ql/src/test/queries/clientpositive/parquet_vectorization_11.q ql/src/test/queries/clientpositive/parquet_vectorization_11.q index a6e6810f0d..4115c2974d 100644 --- ql/src/test/queries/clientpositive/parquet_vectorization_11.q +++ ql/src/test/queries/clientpositive/parquet_vectorization_11.q @@ -4,7 +4,7 @@ set hive.fetch.task.conversion=none; -- SORT_QUERY_RESULTS -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT cstring1, cboolean1, cdouble, diff --git ql/src/test/queries/clientpositive/parquet_vectorization_12.q ql/src/test/queries/clientpositive/parquet_vectorization_12.q index ff6949aefe..e8787558b9 100644 --- ql/src/test/queries/clientpositive/parquet_vectorization_12.q +++ ql/src/test/queries/clientpositive/parquet_vectorization_12.q @@ -5,7 +5,7 @@ set hive.fetch.task.conversion=none; -- SORT_QUERY_RESULTS -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT cbigint, cboolean1, cstring1, diff --git ql/src/test/queries/clientpositive/parquet_vectorization_13.q ql/src/test/queries/clientpositive/parquet_vectorization_13.q index 0b23f505f4..d13ae73c1d 100644 --- ql/src/test/queries/clientpositive/parquet_vectorization_13.q +++ ql/src/test/queries/clientpositive/parquet_vectorization_13.q @@ -6,7 +6,7 @@ set hive.fetch.task.conversion=none; -- SORT_QUERY_RESULTS -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, ctinyint, ctimestamp1, diff --git ql/src/test/queries/clientpositive/parquet_vectorization_14.q ql/src/test/queries/clientpositive/parquet_vectorization_14.q index 3ec437f754..868c899b1a 100644 --- ql/src/test/queries/clientpositive/parquet_vectorization_14.q +++ ql/src/test/queries/clientpositive/parquet_vectorization_14.q @@ -6,7 +6,7 @@ set hive.fetch.task.conversion=none; -- SORT_QUERY_RESULTS -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT ctimestamp1, cfloat, cstring1, diff --git ql/src/test/queries/clientpositive/parquet_vectorization_15.q ql/src/test/queries/clientpositive/parquet_vectorization_15.q index 69065232e5..70cf175451 100644 --- ql/src/test/queries/clientpositive/parquet_vectorization_15.q +++ ql/src/test/queries/clientpositive/parquet_vectorization_15.q @@ -9,7 +9,7 @@ set hive.vectorized.execution.reduce.enabled=false; -- SORT_QUERY_RESULTS -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT cfloat, cboolean1, cdouble, diff --git ql/src/test/queries/clientpositive/parquet_vectorization_16.q ql/src/test/queries/clientpositive/parquet_vectorization_16.q index 49d7b5a99c..afae4463bd 100644 --- ql/src/test/queries/clientpositive/parquet_vectorization_16.q +++ ql/src/test/queries/clientpositive/parquet_vectorization_16.q @@ -6,7 +6,7 @@ set hive.fetch.task.conversion=none; -- SORT_QUERY_RESULTS -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT cstring1, cdouble, ctimestamp1, diff --git ql/src/test/queries/clientpositive/parquet_vectorization_17.q ql/src/test/queries/clientpositive/parquet_vectorization_17.q index dafe677ff9..1af4dde456 
100644 --- ql/src/test/queries/clientpositive/parquet_vectorization_17.q +++ ql/src/test/queries/clientpositive/parquet_vectorization_17.q @@ -6,7 +6,7 @@ set hive.fetch.task.conversion=none; -- SORT_QUERY_RESULTS -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT cfloat, cstring1, cint, diff --git ql/src/test/queries/clientpositive/parquet_vectorization_2.q ql/src/test/queries/clientpositive/parquet_vectorization_2.q index a2c87f73fb..8bec598e0e 100644 --- ql/src/test/queries/clientpositive/parquet_vectorization_2.q +++ ql/src/test/queries/clientpositive/parquet_vectorization_2.q @@ -4,7 +4,7 @@ set hive.fetch.task.conversion=none; -- SORT_QUERY_RESULTS -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT AVG(csmallint), (AVG(csmallint) % -563), (AVG(csmallint) + 762), diff --git ql/src/test/queries/clientpositive/parquet_vectorization_3.q ql/src/test/queries/clientpositive/parquet_vectorization_3.q index 7b08952982..4f4bd57331 100644 --- ql/src/test/queries/clientpositive/parquet_vectorization_3.q +++ ql/src/test/queries/clientpositive/parquet_vectorization_3.q @@ -5,7 +5,7 @@ set hive.fetch.task.conversion=none; -- SORT_QUERY_RESULTS -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT STDDEV_SAMP(csmallint), (STDDEV_SAMP(csmallint) - 10.175), STDDEV_POP(ctinyint), diff --git ql/src/test/queries/clientpositive/parquet_vectorization_4.q ql/src/test/queries/clientpositive/parquet_vectorization_4.q index d2bd83241c..380cda8021 100644 --- ql/src/test/queries/clientpositive/parquet_vectorization_4.q +++ ql/src/test/queries/clientpositive/parquet_vectorization_4.q @@ -4,7 +4,7 @@ set hive.fetch.task.conversion=none; -- SORT_QUERY_RESULTS -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(cint), (SUM(cint) * -563), (-3728 + SUM(cint)), diff --git ql/src/test/queries/clientpositive/parquet_vectorization_5.q ql/src/test/queries/clientpositive/parquet_vectorization_5.q index 071048c99e..4dd9705246 100644 --- ql/src/test/queries/clientpositive/parquet_vectorization_5.q +++ ql/src/test/queries/clientpositive/parquet_vectorization_5.q @@ -4,7 +4,7 @@ set hive.fetch.task.conversion=none; -- SORT_QUERY_RESULTS -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT MAX(csmallint), (MAX(csmallint) * -75), COUNT(*), diff --git ql/src/test/queries/clientpositive/parquet_vectorization_6.q ql/src/test/queries/clientpositive/parquet_vectorization_6.q index 7a7a18b0c9..bee0c53432 100644 --- ql/src/test/queries/clientpositive/parquet_vectorization_6.q +++ ql/src/test/queries/clientpositive/parquet_vectorization_6.q @@ -4,7 +4,7 @@ set hive.fetch.task.conversion=none; -- SORT_QUERY_RESULTS -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, cfloat, cstring1, diff --git ql/src/test/queries/clientpositive/parquet_vectorization_7.q ql/src/test/queries/clientpositive/parquet_vectorization_7.q index 55f21af012..70717f46dc 100644 --- ql/src/test/queries/clientpositive/parquet_vectorization_7.q +++ ql/src/test/queries/clientpositive/parquet_vectorization_7.q @@ -6,7 +6,7 @@ set hive.fetch.task.conversion=none; -- SORT_QUERY_RESULTS -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, cbigint, csmallint, diff --git ql/src/test/queries/clientpositive/parquet_vectorization_8.q ql/src/test/queries/clientpositive/parquet_vectorization_8.q index 4d97cad717..6a5a2c52c4 100644 --- ql/src/test/queries/clientpositive/parquet_vectorization_8.q +++ 
ql/src/test/queries/clientpositive/parquet_vectorization_8.q @@ -6,7 +6,7 @@ set hive.fetch.task.conversion=none; -- SORT_QUERY_RESULTS -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT ctimestamp1, cdouble, cboolean1, diff --git ql/src/test/queries/clientpositive/parquet_vectorization_9.q ql/src/test/queries/clientpositive/parquet_vectorization_9.q index 49d7b5a99c..afae4463bd 100644 --- ql/src/test/queries/clientpositive/parquet_vectorization_9.q +++ ql/src/test/queries/clientpositive/parquet_vectorization_9.q @@ -6,7 +6,7 @@ set hive.fetch.task.conversion=none; -- SORT_QUERY_RESULTS -EXPLAIN VECTORIZATION DETAIL +EXPLAIN VECTORIZATION EXPRESSION SELECT cstring1, cdouble, ctimestamp1, diff --git ql/src/test/queries/clientpositive/parquet_vectorization_limit.q ql/src/test/queries/clientpositive/parquet_vectorization_limit.q index 399c67eb3c..6dac0d6460 100644 --- ql/src/test/queries/clientpositive/parquet_vectorization_limit.q +++ ql/src/test/queries/clientpositive/parquet_vectorization_limit.q @@ -14,31 +14,31 @@ set hive.limit.pushdown.memory.usage=0.3f; -- HIVE-3562 Some limit can be pushed down to map stage - c/p parts from limit_pushdown -explain vectorization detail +explain VECTORIZATION EXPRESSION select ctinyint,cdouble,csmallint from alltypesparquet where ctinyint is not null order by ctinyint,cdouble limit 20; select ctinyint,cdouble,csmallint from alltypesparquet where ctinyint is not null order by ctinyint,cdouble limit 20; -- deduped RS -explain vectorization detail +explain VECTORIZATION EXPRESSION select ctinyint,avg(cdouble + 1) from alltypesparquet group by ctinyint order by ctinyint limit 20; select ctinyint,avg(cdouble + 1) from alltypesparquet group by ctinyint order by ctinyint limit 20; -- distincts -explain vectorization detail +explain VECTORIZATION EXPRESSION select distinct(ctinyint) from alltypesparquet limit 20; select distinct(ctinyint) from alltypesparquet limit 20; -explain vectorization detail +explain VECTORIZATION EXPRESSION select ctinyint, count(distinct(cdouble)) from alltypesparquet group by ctinyint order by ctinyint limit 20; select ctinyint, count(distinct(cdouble)) from alltypesparquet group by ctinyint order by ctinyint limit 20; -- limit zero -explain vectorization detail +explain VECTORIZATION EXPRESSION select ctinyint,cdouble from alltypesparquet order by ctinyint limit 0; select ctinyint,cdouble from alltypesparquet order by ctinyint limit 0; -- 2MR (applied to last RS) -explain vectorization detail +explain VECTORIZATION EXPRESSION select cdouble, sum(ctinyint) as sum from alltypesparquet where ctinyint is not null group by cdouble order by sum, cdouble limit 20; select cdouble, sum(ctinyint) as sum from alltypesparquet where ctinyint is not null group by cdouble order by sum, cdouble limit 20; diff --git ql/src/test/queries/clientpositive/schema_evol_undecorated.q ql/src/test/queries/clientpositive/schema_evol_undecorated.q new file mode 100644 index 0000000000..1fe5c08d5a --- /dev/null +++ ql/src/test/queries/clientpositive/schema_evol_undecorated.q @@ -0,0 +1,14 @@ + +set hive.metastore.disallow.incompatible.col.type.changes=true; + +create external table new_char_decimal (c1 char(20)); +alter table new_char_decimal change c1 c1 decimal(31,0); + +create external table new_varchar_decimal (c1 varchar(25)); +alter table new_varchar_decimal change c1 c1 decimal(12,5); + +create external table new_char_double (c1 char(20)); +alter table new_char_double change c1 c1 double; + +create external table new_varchar_double (c1 
varchar(25)); +alter table new_varchar_double change c1 c1 double; \ No newline at end of file diff --git ql/src/test/queries/clientpositive/tez_dynpart_hashjoin_3.q ql/src/test/queries/clientpositive/tez_dynpart_hashjoin_3.q index 0500a621ed..10e982e1f6 100644 --- ql/src/test/queries/clientpositive/tez_dynpart_hashjoin_3.q +++ ql/src/test/queries/clientpositive/tez_dynpart_hashjoin_3.q @@ -24,3 +24,9 @@ select a.* from alltypesorc a left outer join src b on a.cint = cast(b.key as int) and (a.cint < 100) limit 1; + +explain +select a.* +from alltypesorc a left outer join src b +on a.cint = cast(b.key as int) +limit 1; diff --git ql/src/test/queries/clientpositive/union14.q ql/src/test/queries/clientpositive/union14.q index 34f73cda63..f56e53dd91 100644 --- ql/src/test/queries/clientpositive/union14.q +++ ql/src/test/queries/clientpositive/union14.q @@ -2,7 +2,9 @@ --! qt:dataset:src set hive.mapred.mode=nonstrict; set hive.map.aggr = true; --- SORT_BEFORE_DIFF + +-- SORT_QUERY_RESULTS + -- union case: 1 subquery is a map-reduce job, different inputs for sub-queries, followed by reducesink explain diff --git ql/src/test/queries/clientpositive/union7.q ql/src/test/queries/clientpositive/union7.q index 4d3eed54cc..8668be05de 100644 --- ql/src/test/queries/clientpositive/union7.q +++ ql/src/test/queries/clientpositive/union7.q @@ -4,7 +4,8 @@ set hive.mapred.mode=nonstrict; set hive.explain.user=false; set hive.map.aggr = true; --- SORT_BEFORE_DIFF +-- SORT_QUERY_RESULTS + -- union case: 1 subquery is a map-reduce job, different inputs for sub-queries, followed by reducesink explain diff --git ql/src/test/queries/clientpositive/union_null.q ql/src/test/queries/clientpositive/union_null.q index 49a0e0818b..44d148f0e6 100644 --- ql/src/test/queries/clientpositive/union_null.q +++ ql/src/test/queries/clientpositive/union_null.q @@ -1,6 +1,7 @@ --! qt:dataset:src1 --! 
qt:dataset:src --- SORT_BEFORE_DIFF + +-- SORT_QUERY_RESULTS -- HIVE-2901 select x from (select * from (select value as x from src order by x limit 5)a union all select * from (select cast(NULL as string) as x from src limit 5)b )a; diff --git ql/src/test/queries/clientpositive/union_view.q ql/src/test/queries/clientpositive/union_view.q index 186cb026cf..a3787079d5 100644 --- ql/src/test/queries/clientpositive/union_view.q +++ ql/src/test/queries/clientpositive/union_view.q @@ -3,6 +3,8 @@ set hive.mapred.mode=nonstrict; set hive.stats.dbclass=fs; set hive.explain.user=false; +-- SORT_QUERY_RESULTS + CREATE TABLE src_union_1_n0 (key int, value string) PARTITIONED BY (ds string); CREATE TABLE src_union_2_n0 (key int, value string) PARTITIONED BY (ds string, part_1 string); diff --git ql/src/test/queries/clientpositive/vector_full_outer_join.q ql/src/test/queries/clientpositive/vector_full_outer_join.q new file mode 100644 index 0000000000..cc774887a0 --- /dev/null +++ ql/src/test/queries/clientpositive/vector_full_outer_join.q @@ -0,0 +1,82 @@ +set hive.cli.print.header=true; +set hive.mapred.mode=nonstrict; +set hive.explain.user=false; +set hive.fetch.task.conversion=none; +set hive.auto.convert.join=true; +set hive.auto.convert.join.noconditionaltask=true; +set hive.auto.convert.join.noconditionaltask.size=10000; + +-- SORT_QUERY_RESULTS + +drop table if exists TJOIN1; +drop table if exists TJOIN2; +create table if not exists TJOIN1 (RNUM int , C1 int, C2 int) STORED AS orc; +create table if not exists TJOIN2 (RNUM int , C1 int, C2 char(2)) STORED AS orc; +create table if not exists TJOIN1STAGE (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE ; +create table if not exists TJOIN2STAGE (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE ; +LOAD DATA LOCAL INPATH '../../data/files/tjoin1.txt' OVERWRITE INTO TABLE TJOIN1STAGE; +LOAD DATA LOCAL INPATH '../../data/files/tjoin2.txt' OVERWRITE INTO TABLE TJOIN2STAGE; +INSERT INTO TABLE TJOIN1 SELECT * from TJOIN1STAGE; +INSERT INTO TABLE TJOIN2 SELECT * from TJOIN2STAGE; + +SET hive.mapjoin.full.outer=true; + +set hive.vectorized.execution.enabled=false; +set hive.mapjoin.hybridgrace.hashtable=false; +explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ); + +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ); + +set hive.vectorized.execution.enabled=false; +set hive.mapjoin.hybridgrace.hashtable=true; +explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ); + +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ); + + +set hive.vectorized.execution.enabled=true; +set hive.mapjoin.hybridgrace.hashtable=false; +SET hive.vectorized.execution.mapjoin.native.enabled=false; +explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ); + +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ); + +set 
hive.vectorized.execution.enabled=true; +set hive.mapjoin.hybridgrace.hashtable=true; +SET hive.vectorized.execution.mapjoin.native.enabled=false; +explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ); + +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ); + +set hive.vectorized.execution.enabled=true; +set hive.mapjoin.hybridgrace.hashtable=false; +SET hive.vectorized.execution.mapjoin.native.enabled=true; +explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ); + +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ); + +set hive.vectorized.execution.enabled=true; +set hive.mapjoin.hybridgrace.hashtable=true; +SET hive.vectorized.execution.mapjoin.native.enabled=true; +explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ); + +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ); + + +-- Omit tjoin2.c1 +explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ); + +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ); + +-- Omit tjoin2.c1 and tjoin2.c2 +explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ); + +select tjoin1.rnum, tjoin1.c1, tjoin1.c2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ); diff --git ql/src/test/queries/clientpositive/vector_fullouter_mapjoin_1_fast.q ql/src/test/queries/clientpositive/vector_fullouter_mapjoin_1_fast.q new file mode 100644 index 0000000000..1685f35c2a --- /dev/null +++ ql/src/test/queries/clientpositive/vector_fullouter_mapjoin_1_fast.q @@ -0,0 +1,290 @@ +set hive.mapred.mode=nonstrict; +set hive.explain.user=false; +set hive.vectorized.execution.enabled=true; +set hive.vectorized.execution.mapjoin.native.enabled=true; +set hive.vectorized.execution.mapjoin.native.fast.hashtable.enabled=true; + +set hive.auto.convert.join=true; +SET hive.auto.convert.join.noconditionaltask=true; +set hive.stats.fetch.column.stats=false; + +------------------------------------------------------------------------------------------ +-- FULL OUTER Vectorized Native MapJoin variation for FAST hash table implementation. +------------------------------------------------------------------------------------------ + +-- SORT_QUERY_RESULTS + +------------------------------------------------------------------------------------------ +-- DYNAMIC PARTITION HASH JOIN +------------------------------------------------------------------------------------------ + +set hive.optimize.dynamic.partition.hashjoin=true; + +set hive.mapjoin.hybridgrace.hashtable=false; + +-- NOTE: Use very small sizes here to skip SHARED MEMORY MapJoin and force usage +-- NOTE: of DYNAMIC PARTITION HASH JOIN instead. 
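-- In practice: with hive.optimize.dynamic.partition.hashjoin=true, a small-table
-- size estimate above hive.auto.convert.join.noconditionaltask.size disqualifies
-- the shared-memory MapJoin conversion, so the FULL OUTER joins below are planned
-- as reduce-side DYNAMIC PARTITION HASH JOINs; the 500-byte thresholds that follow
-- are below the size of any real table, which forces that path for every query here.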
+set hive.auto.convert.join.noconditionaltask.size=500; +set hive.exec.reducers.bytes.per.reducer=500; + +------------------------------------------------------------------------------------------ +-- Single LONG key +------------------------------------------------------------------------------------------ + +CREATE TABLE fullouter_long_big_1a_txt(key bigint) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_txt; +CREATE TABLE fullouter_long_big_1a STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_txt; + +CREATE TABLE fullouter_long_big_1a_nonull_txt(key bigint) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_nonull_txt; +CREATE TABLE fullouter_long_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_nonull_txt; + +CREATE TABLE fullouter_long_small_1a_txt(key bigint, s_date date) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_txt; +CREATE TABLE fullouter_long_small_1a STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_txt; + +CREATE TABLE fullouter_long_small_1a_nonull_txt(key bigint, s_date date) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_nonull_txt; +CREATE TABLE fullouter_long_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_nonull_txt; + +analyze table fullouter_long_big_1a compute statistics; +analyze table fullouter_long_big_1a compute statistics for columns; +analyze table fullouter_long_big_1a_nonull compute statistics; +analyze table fullouter_long_big_1a_nonull compute statistics for columns; +analyze table fullouter_long_small_1a compute statistics; +analyze table fullouter_long_small_1a compute statistics for columns; +analyze table fullouter_long_small_1a_nonull compute statistics; +analyze table fullouter_long_small_1a_nonull compute statistics for columns; + +-- Do first one with FULL OUTER MapJoin NOT Enabled. +SET hive.mapjoin.full.outer=false; +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key; + +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key; + +SET hive.mapjoin.full.outer=true; + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key; + +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key; + +-- Big table without NULL key(s). +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key; + +-- Small table without NULL key(s). +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key +order by b.key; + +-- Both Big and Small tables without NULL key(s). 
+SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key +order by b.key; + + +CREATE TABLE fullouter_long_big_1b(key smallint) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1b.txt' OVERWRITE INTO TABLE fullouter_long_big_1b; + +CREATE TABLE fullouter_long_small_1b(key smallint, s_timestamp timestamp) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1b.txt' OVERWRITE INTO TABLE fullouter_long_small_1b; + +analyze table fullouter_long_big_1b compute statistics; +analyze table fullouter_long_big_1b compute statistics for columns; +analyze table fullouter_long_small_1b compute statistics; +analyze table fullouter_long_small_1b compute statistics for columns; + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key; + +SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key; + + +CREATE TABLE fullouter_long_big_1c(key int, b_string string) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1c.txt' OVERWRITE INTO TABLE fullouter_long_big_1c; + +CREATE TABLE fullouter_long_small_1c(key int, s_decimal decimal(38, 18)) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1c.txt' OVERWRITE INTO TABLE fullouter_long_small_1c; + +analyze table fullouter_long_big_1c compute statistics; +analyze table fullouter_long_big_1c compute statistics for columns; +analyze table fullouter_long_small_1c compute statistics; +analyze table fullouter_long_small_1c compute statistics for columns; + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key; + +SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key; + + +CREATE TABLE fullouter_long_big_1d(key int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1d.txt' OVERWRITE INTO TABLE fullouter_long_big_1d; + +CREATE TABLE fullouter_long_small_1d(key int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1d.txt' OVERWRITE INTO TABLE fullouter_long_small_1d; + +analyze table fullouter_long_big_1d compute statistics; +analyze table fullouter_long_big_1d compute statistics for columns; +analyze table fullouter_long_small_1d compute statistics; +analyze table fullouter_long_small_1d compute statistics for columns; + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key; + +SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key; + + +------------------------------------------------------------------------------------------ +-- MULTI-KEY key +------------------------------------------------------------------------------------------ + +CREATE TABLE fullouter_multikey_big_1a_txt(key0 smallint, key1 int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL 
INPATH '../../data/files/fullouter_multikey_big_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_txt; +CREATE TABLE fullouter_multikey_big_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_txt; + +CREATE TABLE fullouter_multikey_big_1a_nonull_txt(key0 smallint, key1 int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_nonull_txt; +CREATE TABLE fullouter_multikey_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_nonull_txt; + +CREATE TABLE fullouter_multikey_small_1a_txt(key0 smallint, key1 int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_txt; +CREATE TABLE fullouter_multikey_small_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_txt; + +CREATE TABLE fullouter_multikey_small_1a_nonull_txt(key0 smallint, key1 int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_nonull_txt; +CREATE TABLE fullouter_multikey_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_nonull_txt; + +analyze table fullouter_multikey_big_1a compute statistics; +analyze table fullouter_multikey_big_1a compute statistics for columns; +analyze table fullouter_multikey_big_1a_nonull compute statistics; +analyze table fullouter_multikey_big_1a_nonull compute statistics for columns; +analyze table fullouter_multikey_small_1a compute statistics; +analyze table fullouter_multikey_small_1a compute statistics for columns; +analyze table fullouter_multikey_small_1a_nonull compute statistics; +analyze table fullouter_multikey_small_1a_nonull compute statistics for columns; + + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1; + +SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1; + +-- Big table without NULL key(s). +SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1; + +-- Small table without NULL key(s). +SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1; + +-- Both Big and Small tables without NULL key(s). 
+SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1; + + + + +CREATE TABLE fullouter_multikey_big_1b_txt(key0 timestamp, key1 smallint, key2 string) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1b_txt; +CREATE TABLE fullouter_multikey_big_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1b_txt; + +CREATE TABLE fullouter_multikey_small_1b_txt(key0 timestamp, key1 smallint, key2 string, s_decimal decimal(38, 18)) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1b_txt; +CREATE TABLE fullouter_multikey_small_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1b_txt; + +analyze table fullouter_multikey_big_1b_txt compute statistics; +analyze table fullouter_multikey_big_1b_txt compute statistics for columns; +analyze table fullouter_multikey_small_1b_txt compute statistics; +analyze table fullouter_multikey_small_1b_txt compute statistics for columns; + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2 +order by b.key0, b.key1; + +SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2 +order by b.key0, b.key1; + + +------------------------------------------------------------------------------------------ +-- Single STRING key +------------------------------------------------------------------------------------------ + +CREATE TABLE fullouter_string_big_1a_txt(key string) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_txt; +CREATE TABLE fullouter_string_big_1a STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_txt; + +CREATE TABLE fullouter_string_big_1a_nonull_txt(key string) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_nonull_txt; +CREATE TABLE fullouter_string_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_nonull_txt; + +CREATE TABLE fullouter_string_small_1a_txt(key string, s_date date, s_timestamp timestamp) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_txt; +CREATE TABLE fullouter_string_small_1a STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_txt; + +CREATE TABLE fullouter_string_small_1a_nonull_txt(key string, s_date date, s_timestamp timestamp) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_nonull_txt; +CREATE TABLE fullouter_string_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_nonull_txt; + +analyze table fullouter_string_big_1a compute statistics; +analyze table fullouter_string_big_1a compute statistics for columns; 
+analyze table fullouter_string_big_1a_nonull compute statistics; +analyze table fullouter_string_big_1a_nonull compute statistics for columns; +analyze table fullouter_string_small_1a compute statistics; +analyze table fullouter_string_small_1a compute statistics for columns; +analyze table fullouter_string_small_1a_nonull compute statistics; +analyze table fullouter_string_small_1a_nonull compute statistics for columns; + + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key; + +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key; + +-- Big table without NULL key(s). +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key; + +-- Small table without NULL key(s). +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key +order by b.key; + +-- Both Big and Small tables without NULL key(s). +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key +order by b.key; + + + diff --git ql/src/test/queries/clientpositive/vector_fullouter_mapjoin_1_optimized.q ql/src/test/queries/clientpositive/vector_fullouter_mapjoin_1_optimized.q new file mode 100644 index 0000000000..8b59266916 --- /dev/null +++ ql/src/test/queries/clientpositive/vector_fullouter_mapjoin_1_optimized.q @@ -0,0 +1,290 @@ +set hive.mapred.mode=nonstrict; +set hive.explain.user=false; +set hive.vectorized.execution.enabled=true; +set hive.vectorized.execution.mapjoin.native.enabled=true; +set hive.vectorized.execution.mapjoin.native.fast.hashtable.enabled=false; + +set hive.auto.convert.join=true; +SET hive.auto.convert.join.noconditionaltask=true; +set hive.stats.fetch.column.stats=false; + +------------------------------------------------------------------------------------------ +-- FULL OUTER Vectorized Native MapJoin variation for OPTIMIZED hash table implementation. +------------------------------------------------------------------------------------------ + +-- SORT_QUERY_RESULTS + +------------------------------------------------------------------------------------------ +-- DYNAMIC PARTITION HASH JOIN +------------------------------------------------------------------------------------------ + +set hive.optimize.dynamic.partition.hashjoin=true; + +set hive.mapjoin.hybridgrace.hashtable=false; + +-- NOTE: Use very small sizes here to skip SHARED MEMORY MapJoin and force usage +-- NOTE: of DYNAMIC PARTITION HASH JOIN instead. 
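The ANALYZE statements below are load-bearing: automatic join conversion picks the hash-table (small) side by comparing each table's basic stats against the size thresholds configured above, so stale or missing stats could change the plans under test. A self-contained way to inspect what the planner sees (stats_demo is a hypothetical table, not part of this patch):

CREATE TABLE stats_demo (key bigint) STORED AS ORC;
INSERT INTO TABLE stats_demo VALUES (1), (2), (3);
ANALYZE TABLE stats_demo COMPUTE STATISTICS;
-- numRows, rawDataSize, and totalSize now appear under "Table Parameters"
DESCRIBE FORMATTED stats_demo;
DROP TABLE stats_demo;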
+set hive.auto.convert.join.noconditionaltask.size=500; +set hive.exec.reducers.bytes.per.reducer=500; + +------------------------------------------------------------------------------------------ +-- Single LONG key +------------------------------------------------------------------------------------------ + +CREATE TABLE fullouter_long_big_1a_txt(key bigint) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_txt; +CREATE TABLE fullouter_long_big_1a STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_txt; + +CREATE TABLE fullouter_long_big_1a_nonull_txt(key bigint) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_nonull_txt; +CREATE TABLE fullouter_long_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_nonull_txt; + +CREATE TABLE fullouter_long_small_1a_txt(key bigint, s_date date) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_txt; +CREATE TABLE fullouter_long_small_1a STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_txt; + +CREATE TABLE fullouter_long_small_1a_nonull_txt(key bigint, s_date date) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_nonull_txt; +CREATE TABLE fullouter_long_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_nonull_txt; + +analyze table fullouter_long_big_1a compute statistics; +analyze table fullouter_long_big_1a compute statistics for columns; +analyze table fullouter_long_big_1a_nonull compute statistics; +analyze table fullouter_long_big_1a_nonull compute statistics for columns; +analyze table fullouter_long_small_1a compute statistics; +analyze table fullouter_long_small_1a compute statistics for columns; +analyze table fullouter_long_small_1a_nonull compute statistics; +analyze table fullouter_long_small_1a_nonull compute statistics for columns; + +-- Do first one with FULL OUTER MapJoin NOT Enabled. +SET hive.mapjoin.full.outer=false; +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key; + +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key; + +SET hive.mapjoin.full.outer=true; + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key; + +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key; + +-- Big table without NULL key(s). +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key; + +-- Small table without NULL key(s). +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key +order by b.key; + +-- Both Big and Small tables without NULL key(s). 
+SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key +order by b.key; + + +CREATE TABLE fullouter_long_big_1b(key smallint) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1b.txt' OVERWRITE INTO TABLE fullouter_long_big_1b; + +CREATE TABLE fullouter_long_small_1b(key smallint, s_timestamp timestamp) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1b.txt' OVERWRITE INTO TABLE fullouter_long_small_1b; + +analyze table fullouter_long_big_1b compute statistics; +analyze table fullouter_long_big_1b compute statistics for columns; +analyze table fullouter_long_small_1b compute statistics; +analyze table fullouter_long_small_1b compute statistics for columns; + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key; + +SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key; + + +CREATE TABLE fullouter_long_big_1c(key int, b_string string) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1c.txt' OVERWRITE INTO TABLE fullouter_long_big_1c; + +CREATE TABLE fullouter_long_small_1c(key int, s_decimal decimal(38, 18)) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1c.txt' OVERWRITE INTO TABLE fullouter_long_small_1c; + +analyze table fullouter_long_big_1c compute statistics; +analyze table fullouter_long_big_1c compute statistics for columns; +analyze table fullouter_long_small_1c compute statistics; +analyze table fullouter_long_small_1c compute statistics for columns; + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key; + +SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key; + + +CREATE TABLE fullouter_long_big_1d(key int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1d.txt' OVERWRITE INTO TABLE fullouter_long_big_1d; + +CREATE TABLE fullouter_long_small_1d(key int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1d.txt' OVERWRITE INTO TABLE fullouter_long_small_1d; + +analyze table fullouter_long_big_1d compute statistics; +analyze table fullouter_long_big_1d compute statistics for columns; +analyze table fullouter_long_small_1d compute statistics; +analyze table fullouter_long_small_1d compute statistics for columns; + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key; + +SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key; + + +------------------------------------------------------------------------------------------ +-- MULTI-KEY key +------------------------------------------------------------------------------------------ + +CREATE TABLE fullouter_multikey_big_1a_txt(key0 smallint, key1 int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL 
INPATH '../../data/files/fullouter_multikey_big_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_txt; +CREATE TABLE fullouter_multikey_big_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_txt; + +CREATE TABLE fullouter_multikey_big_1a_nonull_txt(key0 smallint, key1 int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_nonull_txt; +CREATE TABLE fullouter_multikey_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_nonull_txt; + +CREATE TABLE fullouter_multikey_small_1a_txt(key0 smallint, key1 int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_txt; +CREATE TABLE fullouter_multikey_small_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_txt; + +CREATE TABLE fullouter_multikey_small_1a_nonull_txt(key0 smallint, key1 int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_nonull_txt; +CREATE TABLE fullouter_multikey_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_nonull_txt; + +analyze table fullouter_multikey_big_1a compute statistics; +analyze table fullouter_multikey_big_1a compute statistics for columns; +analyze table fullouter_multikey_big_1a_nonull compute statistics; +analyze table fullouter_multikey_big_1a_nonull compute statistics for columns; +analyze table fullouter_multikey_small_1a compute statistics; +analyze table fullouter_multikey_small_1a compute statistics for columns; +analyze table fullouter_multikey_small_1a_nonull compute statistics; +analyze table fullouter_multikey_small_1a_nonull compute statistics for columns; + + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1; + +SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1; + +-- Big table without NULL key(s). +SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1; + +-- Small table without NULL key(s). +SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1; + +-- Both Big and Small tables without NULL key(s). 
+SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1; + + + + +CREATE TABLE fullouter_multikey_big_1b_txt(key0 timestamp, key1 smallint, key2 string) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1b_txt; +CREATE TABLE fullouter_multikey_big_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1b_txt; + +CREATE TABLE fullouter_multikey_small_1b_txt(key0 timestamp, key1 smallint, key2 string, s_decimal decimal(38, 18)) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1b_txt; +CREATE TABLE fullouter_multikey_small_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1b_txt; + +analyze table fullouter_multikey_big_1b_txt compute statistics; +analyze table fullouter_multikey_big_1b_txt compute statistics for columns; +analyze table fullouter_multikey_small_1b_txt compute statistics; +analyze table fullouter_multikey_small_1b_txt compute statistics for columns; + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2 +order by b.key0, b.key1; + +SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2 +order by b.key0, b.key1; + + +------------------------------------------------------------------------------------------ +-- Single STRING key +------------------------------------------------------------------------------------------ + +CREATE TABLE fullouter_string_big_1a_txt(key string) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_txt; +CREATE TABLE fullouter_string_big_1a STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_txt; + +CREATE TABLE fullouter_string_big_1a_nonull_txt(key string) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_nonull_txt; +CREATE TABLE fullouter_string_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_nonull_txt; + +CREATE TABLE fullouter_string_small_1a_txt(key string, s_date date, s_timestamp timestamp) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_txt; +CREATE TABLE fullouter_string_small_1a STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_txt; + +CREATE TABLE fullouter_string_small_1a_nonull_txt(key string, s_date date, s_timestamp timestamp) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_nonull_txt; +CREATE TABLE fullouter_string_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_nonull_txt; + +analyze table fullouter_string_big_1a compute statistics; +analyze table fullouter_string_big_1a compute statistics for columns; 
+analyze table fullouter_string_big_1a_nonull compute statistics; +analyze table fullouter_string_big_1a_nonull compute statistics for columns; +analyze table fullouter_string_small_1a compute statistics; +analyze table fullouter_string_small_1a compute statistics for columns; +analyze table fullouter_string_small_1a_nonull compute statistics; +analyze table fullouter_string_small_1a_nonull compute statistics for columns; + + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key; + +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key; + +-- Big table without NULL key(s). +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key; + +-- Small table without NULL key(s). +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key +order by b.key; + +-- Both Big and Small tables without NULL key(s). +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key +order by b.key; + + + diff --git ql/src/test/queries/clientpositive/vector_fullouter_mapjoin_1_optimized_passthru.q ql/src/test/queries/clientpositive/vector_fullouter_mapjoin_1_optimized_passthru.q new file mode 100644 index 0000000000..869668edb8 --- /dev/null +++ ql/src/test/queries/clientpositive/vector_fullouter_mapjoin_1_optimized_passthru.q @@ -0,0 +1,290 @@ +set hive.mapred.mode=nonstrict; +set hive.explain.user=false; +set hive.vectorized.execution.enabled=true; +set hive.vectorized.execution.mapjoin.native.enabled=false; +set hive.vectorized.execution.mapjoin.native.fast.hashtable.enabled=false; + +set hive.auto.convert.join=true; +SET hive.auto.convert.join.noconditionaltask=true; +set hive.stats.fetch.column.stats=false; + +------------------------------------------------------------------------------------------ +-- FULL OUTER Vectorized PASS-THRU Mode MapJoin variation for OPTIMIZED hash table implementation. +------------------------------------------------------------------------------------------ + +-- SORT_QUERY_RESULTS + +------------------------------------------------------------------------------------------ +-- DYNAMIC PARTITION HASH JOIN +------------------------------------------------------------------------------------------ + +set hive.optimize.dynamic.partition.hashjoin=true; + +set hive.mapjoin.hybridgrace.hashtable=false; + +-- NOTE: Use very small sizes here to skip SHARED MEMORY MapJoin and force usage +-- NOTE: of DYNAMIC PARTITION HASH JOIN instead.
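For reference, the three new vector_fullouter_mapjoin_1_*.q files differ only in the two vectorized MapJoin switches from their headers (hive.vectorized.execution.mapjoin.* prefixes elided); the table setup and queries are otherwise identical:

-- _fast:                native.enabled=true    native.fast.hashtable.enabled=true
-- _optimized:           native.enabled=true    native.fast.hashtable.enabled=false
-- _optimized_passthru:  native.enabled=false   native.fast.hashtable.enabled=false

That is, the native fast hash table, the native optimized hash table, and the non-native pass-thru operator path, respectively.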
+set hive.auto.convert.join.noconditionaltask.size=500; +set hive.exec.reducers.bytes.per.reducer=500; + +------------------------------------------------------------------------------------------ +-- Single LONG key +------------------------------------------------------------------------------------------ + +CREATE TABLE fullouter_long_big_1a_txt(key bigint) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_txt; +CREATE TABLE fullouter_long_big_1a STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_txt; + +CREATE TABLE fullouter_long_big_1a_nonull_txt(key bigint) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_nonull_txt; +CREATE TABLE fullouter_long_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_nonull_txt; + +CREATE TABLE fullouter_long_small_1a_txt(key bigint, s_date date) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_txt; +CREATE TABLE fullouter_long_small_1a STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_txt; + +CREATE TABLE fullouter_long_small_1a_nonull_txt(key bigint, s_date date) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_nonull_txt; +CREATE TABLE fullouter_long_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_nonull_txt; + +analyze table fullouter_long_big_1a compute statistics; +analyze table fullouter_long_big_1a compute statistics for columns; +analyze table fullouter_long_big_1a_nonull compute statistics; +analyze table fullouter_long_big_1a_nonull compute statistics for columns; +analyze table fullouter_long_small_1a compute statistics; +analyze table fullouter_long_small_1a compute statistics for columns; +analyze table fullouter_long_small_1a_nonull compute statistics; +analyze table fullouter_long_small_1a_nonull compute statistics for columns; + +-- Do first one with FULL OUTER MapJoin NOT Enabled. +SET hive.mapjoin.full.outer=false; +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key; + +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key; + +SET hive.mapjoin.full.outer=true; + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key; + +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key; + +-- Big table without NULL key(s). +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key; + +-- Small table without NULL key(s). +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key +order by b.key; + +-- Both Big and Small tables without NULL key(s). 
+SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key +order by b.key; + + +CREATE TABLE fullouter_long_big_1b(key smallint) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1b.txt' OVERWRITE INTO TABLE fullouter_long_big_1b; + +CREATE TABLE fullouter_long_small_1b(key smallint, s_timestamp timestamp) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1b.txt' OVERWRITE INTO TABLE fullouter_long_small_1b; + +analyze table fullouter_long_big_1b compute statistics; +analyze table fullouter_long_big_1b compute statistics for columns; +analyze table fullouter_long_small_1b compute statistics; +analyze table fullouter_long_small_1b compute statistics for columns; + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key; + +SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key; + + +CREATE TABLE fullouter_long_big_1c(key int, b_string string) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1c.txt' OVERWRITE INTO TABLE fullouter_long_big_1c; + +CREATE TABLE fullouter_long_small_1c(key int, s_decimal decimal(38, 18)) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1c.txt' OVERWRITE INTO TABLE fullouter_long_small_1c; + +analyze table fullouter_long_big_1c compute statistics; +analyze table fullouter_long_big_1c compute statistics for columns; +analyze table fullouter_long_small_1c compute statistics; +analyze table fullouter_long_small_1c compute statistics for columns; + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key; + +SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key; + + +CREATE TABLE fullouter_long_big_1d(key int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1d.txt' OVERWRITE INTO TABLE fullouter_long_big_1d; + +CREATE TABLE fullouter_long_small_1d(key int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1d.txt' OVERWRITE INTO TABLE fullouter_long_small_1d; + +analyze table fullouter_long_big_1d compute statistics; +analyze table fullouter_long_big_1d compute statistics for columns; +analyze table fullouter_long_small_1d compute statistics; +analyze table fullouter_long_small_1d compute statistics for columns; + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key; + +SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key; + + +------------------------------------------------------------------------------------------ +-- MULTI-KEY key +------------------------------------------------------------------------------------------ + +CREATE TABLE fullouter_multikey_big_1a_txt(key0 smallint, key1 int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL 
INPATH '../../data/files/fullouter_multikey_big_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_txt; +CREATE TABLE fullouter_multikey_big_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_txt; + +CREATE TABLE fullouter_multikey_big_1a_nonull_txt(key0 smallint, key1 int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_nonull_txt; +CREATE TABLE fullouter_multikey_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_nonull_txt; + +CREATE TABLE fullouter_multikey_small_1a_txt(key0 smallint, key1 int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_txt; +CREATE TABLE fullouter_multikey_small_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_txt; + +CREATE TABLE fullouter_multikey_small_1a_nonull_txt(key0 smallint, key1 int) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_nonull_txt; +CREATE TABLE fullouter_multikey_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_nonull_txt; + +analyze table fullouter_multikey_big_1a compute statistics; +analyze table fullouter_multikey_big_1a compute statistics for columns; +analyze table fullouter_multikey_big_1a_nonull compute statistics; +analyze table fullouter_multikey_big_1a_nonull compute statistics for columns; +analyze table fullouter_multikey_small_1a compute statistics; +analyze table fullouter_multikey_small_1a compute statistics for columns; +analyze table fullouter_multikey_small_1a_nonull compute statistics; +analyze table fullouter_multikey_small_1a_nonull compute statistics for columns; + + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1; + +SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1; + +-- Big table without NULL key(s). +SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1; + +-- Small table without NULL key(s). +SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1; + +-- Both Big and Small tables without NULL key(s). 
+SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1; + + + + +CREATE TABLE fullouter_multikey_big_1b_txt(key0 timestamp, key1 smallint, key2 string) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1b_txt; +CREATE TABLE fullouter_multikey_big_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1b_txt; + +CREATE TABLE fullouter_multikey_small_1b_txt(key0 timestamp, key1 smallint, key2 string, s_decimal decimal(38, 18)) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1b_txt; +CREATE TABLE fullouter_multikey_small_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1b_txt; + +analyze table fullouter_multikey_big_1b_txt compute statistics; +analyze table fullouter_multikey_big_1b_txt compute statistics for columns; +analyze table fullouter_multikey_small_1b_txt compute statistics; +analyze table fullouter_multikey_small_1b_txt compute statistics for columns; + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2 +order by b.key0, b.key1; + +SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2 +order by b.key0, b.key1; + + +------------------------------------------------------------------------------------------ +-- Single STRING key +------------------------------------------------------------------------------------------ + +CREATE TABLE fullouter_string_big_1a_txt(key string) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_txt; +CREATE TABLE fullouter_string_big_1a STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_txt; + +CREATE TABLE fullouter_string_big_1a_nonull_txt(key string) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_nonull_txt; +CREATE TABLE fullouter_string_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_nonull_txt; + +CREATE TABLE fullouter_string_small_1a_txt(key string, s_date date, s_timestamp timestamp) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_txt; +CREATE TABLE fullouter_string_small_1a STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_txt; + +CREATE TABLE fullouter_string_small_1a_nonull_txt(key string, s_date date, s_timestamp timestamp) +row format delimited fields terminated by ','; +LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_nonull_txt; +CREATE TABLE fullouter_string_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_nonull_txt; + +analyze table fullouter_string_big_1a compute statistics; +analyze table fullouter_string_big_1a compute statistics for columns; 
+analyze table fullouter_string_big_1a_nonull compute statistics; +analyze table fullouter_string_big_1a_nonull compute statistics for columns; +analyze table fullouter_string_small_1a compute statistics; +analyze table fullouter_string_small_1a compute statistics for columns; +analyze table fullouter_string_small_1a_nonull compute statistics; +analyze table fullouter_string_small_1a_nonull compute statistics for columns; + + +EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key; + +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key; + +-- Big table without NULL key(s). +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key; + +-- Small table without NULL key(s). +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key +order by b.key; + +-- Both Big and Small tables without NULL key(s). +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key +order by b.key; + + + diff --git ql/src/test/queries/clientpositive/vector_join30.q ql/src/test/queries/clientpositive/vector_join30.q index 9672a475f7..74c4433643 100644 --- ql/src/test/queries/clientpositive/vector_join30.q +++ ql/src/test/queries/clientpositive/vector_join30.q @@ -11,7 +11,7 @@ SET hive.auto.convert.join.noconditionaltask.size=1000000000; CREATE TABLE orcsrc_n0 STORED AS ORC AS SELECT * FROM src; -explain vectorization expression +explain vectorization detail FROM (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x JOIN @@ -19,14 +19,14 @@ JOIN ON (x.key = Y.key) select sum(hash(Y.key,Y.value)); -FROM -(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x -JOIN -(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y -ON (x.key = Y.key) -select sum(hash(Y.key,Y.value)); +-- FROM +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +-- JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +-- ON (x.key = Y.key) +-- select sum(hash(Y.key,Y.value)); -explain vectorization expression +explain vectorization detail FROM (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x LEFT OUTER JOIN @@ -34,116 +34,238 @@ LEFT OUTER JOIN ON (x.key = Y.key) select sum(hash(Y.key,Y.value)); +-- FROM +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +-- LEFT OUTER JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +-- ON (x.key = Y.key) +-- select sum(hash(Y.key,Y.value)); + +explain vectorization detail FROM (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x -LEFT OUTER JOIN +RIGHT OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y ON (x.key = Y.key) select sum(hash(Y.key,Y.value)); -explain vectorization expression +-- FROM +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +-- RIGHT OUTER JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +-- ON (x.key = Y.key) +-- select sum(hash(Y.key,Y.value)); + +explain vectorization detail FROM (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x -RIGHT OUTER JOIN +FULL OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y ON (x.key = Y.key) select sum(hash(Y.key,Y.value)); +-- FROM +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +-- FULL OUTER JOIN +-- 
(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +-- ON (x.key = Y.key) +-- select sum(hash(Y.key,Y.value)); + +explain vectorization detail FROM (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x -RIGHT OUTER JOIN +JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y ON (x.key = Y.key) +JOIN +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +ON (x.key = Z.key) select sum(hash(Y.key,Y.value)); -explain vectorization expression +-- FROM +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +-- JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +-- ON (x.key = Y.key) +-- JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +-- ON (x.key = Z.key) +-- select sum(hash(Y.key,Y.value)); + +explain vectorization detail FROM (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y ON (x.key = Y.key) -JOIN +LEFT OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z ON (x.key = Z.key) select sum(hash(Y.key,Y.value)); -FROM +-- FROM +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +-- JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +-- ON (x.key = Y.key) +-- LEFT OUTER JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +-- ON (x.key = Z.key) +-- select sum(hash(Y.key,Y.value)); + +explain vectorization detail +FROM (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x -JOIN +LEFT OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y ON (x.key = Y.key) -JOIN +LEFT OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z ON (x.key = Z.key) select sum(hash(Y.key,Y.value)); -explain vectorization expression +-- FROM +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +-- LEFT OUTER JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +-- ON (x.key = Y.key) +-- LEFT OUTER JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +-- ON (x.key = Z.key) +-- select sum(hash(Y.key,Y.value)); + +explain vectorization detail FROM (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x -JOIN +LEFT OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y ON (x.key = Y.key) -LEFT OUTER JOIN +RIGHT OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z ON (x.key = Z.key) select sum(hash(Y.key,Y.value)); -FROM +-- FROM +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +-- LEFT OUTER JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +-- ON (x.key = Y.key) +-- RIGHT OUTER JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +-- ON (x.key = Z.key) +-- select sum(hash(Y.key,Y.value)); + +explain vectorization detail +FROM (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x -JOIN +RIGHT OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y ON (x.key = Y.key) -LEFT OUTER JOIN +RIGHT OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z ON (x.key = Z.key) select sum(hash(Y.key,Y.value)); -explain vectorization expression +-- FROM +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +-- RIGHT OUTER JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +-- ON (x.key = Y.key) +-- RIGHT OUTER JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +-- ON (x.key = Z.key) +-- select sum(hash(Y.key,Y.value)); + +----------------- + +explain vectorization detail FROM (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x -LEFT OUTER JOIN +JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y ON (x.key = Y.key) -LEFT OUTER JOIN +FULL OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z ON (x.key = Z.key) select 
sum(hash(Y.key,Y.value)); -FROM +-- FROM +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +-- JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +-- ON (x.key = Y.key) +-- FULL OUTER JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +-- ON (x.key = Z.key) +-- select sum(hash(Y.key,Y.value)); + +explain vectorization detail +FROM (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x -LEFT OUTER JOIN +FULL OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y ON (x.key = Y.key) -LEFT OUTER JOIN +FULL OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z ON (x.key = Z.key) select sum(hash(Y.key,Y.value)); -explain vectorization expression +-- FROM +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +-- FULL OUTER JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +-- ON (x.key = Y.key) +-- FULL OUTER JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +-- ON (x.key = Z.key) +-- select sum(hash(Y.key,Y.value)); + +explain vectorization detail FROM (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x -LEFT OUTER JOIN +FULL OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y ON (x.key = Y.key) -RIGHT OUTER JOIN +LEFT OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z ON (x.key = Z.key) select sum(hash(Y.key,Y.value)); -FROM +-- FROM +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +-- FULL OUTER JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +-- ON (x.key = Y.key) +-- LEFT OUTER JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +-- ON (x.key = Z.key) +-- select sum(hash(Y.key,Y.value)); + +explain vectorization detail +FROM (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x LEFT OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y ON (x.key = Y.key) -RIGHT OUTER JOIN +FULL OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z ON (x.key = Z.key) select sum(hash(Y.key,Y.value)); -explain vectorization expression +-- FROM +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +-- LEFT OUTER JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +-- ON (x.key = Y.key) +-- FULL OUTER JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +-- ON (x.key = Z.key) +-- select sum(hash(Y.key,Y.value)); + +explain vectorization detail FROM (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x -RIGHT OUTER JOIN +FULL OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y ON (x.key = Y.key) RIGHT OUTER JOIN @@ -151,12 +273,33 @@ RIGHT OUTER JOIN ON (x.key = Z.key) select sum(hash(Y.key,Y.value)); -FROM +-- FROM +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +-- FULL OUTER JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +-- ON (x.key = Y.key) +-- RIGHT OUTER JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +-- ON (x.key = Z.key) +-- select sum(hash(Y.key,Y.value)); + +explain vectorization detail +FROM (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x RIGHT OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y ON (x.key = Y.key) -RIGHT OUTER JOIN +FULL OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z ON (x.key = Z.key) select sum(hash(Y.key,Y.value)); + +-- FROM +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +-- RIGHT OUTER JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +-- ON (x.key = Y.key) +-- FULL OUTER JOIN +-- (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +-- ON (x.key = Z.key) +-- select sum(hash(Y.key,Y.value)); diff --git ql/src/test/queries/clientpositive/vector_join_filters.q 
diff --git ql/src/test/queries/clientpositive/vector_join_filters.q ql/src/test/queries/clientpositive/vector_join_filters.q
index 88458f89e8..b9f3740b5c 100644
--- ql/src/test/queries/clientpositive/vector_join_filters.q
+++ ql/src/test/queries/clientpositive/vector_join_filters.q
@@ -14,7 +14,10 @@ CREATE TABLE myinput1_n1 STORED AS ORC AS SELECT * FROM myinput1_txt_n0;
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a JOIN myinput1_n1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a LEFT OUTER JOIN myinput1_n1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
+
+EXPLAIN VECTORIZATION OPERATOR
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
+-- SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
 
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a JOIN myinput1_n1 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a JOIN myinput1_n1 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
@@ -26,10 +29,21 @@ SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a LEFT OUTER JOIN
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a LEFT OUTER JOIN myinput1_n1 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a LEFT OUTER JOIN myinput1_n1 b ON a.key = b.key and a.value=b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
 
+EXPLAIN VECTORIZATION OPERATOR
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
+-- SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
+
+EXPLAIN VECTORIZATION OPERATOR
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
+-- SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
+
+EXPLAIN VECTORIZATION OPERATOR
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
+-- SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
+
+EXPLAIN VECTORIZATION OPERATOR
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.key=b.key and a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
+-- SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.key=b.key and a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value;
 
 SELECT sum(hash(a.key,a.value,b.key,b.value)) from myinput1_n1 a LEFT OUTER JOIN myinput1_n1 b ON (a.value=b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value) RIGHT OUTER JOIN myinput1_n1 c ON (b.value=c.value AND c.key > 40 AND c.value > 50 AND c.key = c.value AND b.key > 40 AND b.value > 50 AND b.key = b.value);
 SELECT sum(hash(a.key,a.value,b.key,b.value)) from myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON (a.value=b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value) LEFT OUTER JOIN myinput1_n1 c ON (b.value=c.value AND c.key > 40 AND c.value > 50 AND c.key = c.value AND b.key > 40 AND b.value > 50 AND b.key = b.value);
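The vector_join_filters.q changes above put EXPLAIN VECTORIZATION OPERATOR in front of the RIGHT OUTER JOIN variants, whose ON clauses mix join keys with single-table filters. A short reminder of the semantics being pinned down, illustrative only and not part of the patch, assuming the myinput1_n1 table from this test:

-- In an outer join, an ON-clause filter on the non-preserved side does not
-- drop preserved rows; it only prevents matches, so the other side is NULL.
SELECT a.key, b.key
FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b
ON a.key = b.key AND a.key > 40;
-- Every row of b still appears; pairings failing a.key > 40 yield NULL a.*.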
diff --git ql/src/test/queries/clientpositive/vector_join_nulls.q ql/src/test/queries/clientpositive/vector_join_nulls.q
index 3e8df9a850..f87dc44e1c 100644
--- ql/src/test/queries/clientpositive/vector_join_nulls.q
+++ ql/src/test/queries/clientpositive/vector_join_nulls.q
@@ -14,7 +14,11 @@ CREATE TABLE myinput1_n4 STORED AS ORC AS SELECT * FROM myinput1_txt_n1;
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a JOIN myinput1_n4 b;
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a LEFT OUTER JOIN myinput1_n4 b;
+
+EXPLAIN VECTORIZATION OPERATOR
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b;
+-- SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b;
+
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a JOIN myinput1_n4 b ON a.key = b.value;
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a JOIN myinput1_n4 b ON a.key = b.key;
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a JOIN myinput1_n4 b ON a.value = b.value;
@@ -23,9 +27,21 @@ SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a LEFT OUTER JOIN
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a LEFT OUTER JOIN myinput1_n4 b ON a.value = b.value;
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a LEFT OUTER JOIN myinput1_n4 b ON a.key = b.key;
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a LEFT OUTER JOIN myinput1_n4 b ON a.key = b.key and a.value=b.value;
+
+EXPLAIN VECTORIZATION OPERATOR
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.key = b.value;
+-- SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.key = b.value;
+
+EXPLAIN VECTORIZATION OPERATOR
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.key = b.key;
+-- SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.key = b.key;
+
+EXPLAIN VECTORIZATION OPERATOR
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.value = b.value;
+-- SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.value = b.value;
+
+EXPLAIN VECTORIZATION OPERATOR
+-- SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.key=b.key and a.value = b.value;
 SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.key=b.key and a.value = b.value;
 
 SELECT sum(hash(a.key,a.value,b.key,b.value)) from myinput1_n4 a LEFT OUTER JOIN myinput1_n4 b ON (a.value=b.value) RIGHT OUTER JOIN myinput1_n4 c ON (b.value=c.value);
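vector_join_nulls.q runs the same join matrix over data containing NULL keys. The behavior under test, illustrative only and not part of the patch, assuming the myinput1_n4 table defined in this test:

-- NULL never satisfies '=', so NULL-keyed rows on the preserved side of a
-- RIGHT OUTER JOIN surface exactly once, unmatched, with the other side NULL.
SELECT b.key, b.value, a.key
FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.key = b.key;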
diff --git ql/src/test/queries/clientpositive/vector_left_outer_join2.q ql/src/test/queries/clientpositive/vector_left_outer_join2.q
index 3e5ec7e35e..84f656b99e 100644
--- ql/src/test/queries/clientpositive/vector_left_outer_join2.q
+++ ql/src/test/queries/clientpositive/vector_left_outer_join2.q
@@ -1,3 +1,4 @@
+set hive.cli.print.header=true;
 set hive.mapred.mode=nonstrict;
 set hive.explain.user=false;
 set hive.fetch.task.conversion=none;
@@ -20,14 +21,14 @@ INSERT INTO TABLE TJOIN2 SELECT * from TJOIN2STAGE;
 set hive.vectorized.execution.enabled=false;
 set hive.mapjoin.hybridgrace.hashtable=false;
-explain vectorization expression
+explain vectorization detail
 select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 );
 select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 );
 
 set hive.vectorized.execution.enabled=false;
 set hive.mapjoin.hybridgrace.hashtable=true;
-explain
+explain vectorization detail
 select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 );
 select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 );
@@ -36,7 +37,7 @@ select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left out
 set hive.vectorized.execution.enabled=true;
 set hive.mapjoin.hybridgrace.hashtable=false;
 SET hive.vectorized.execution.mapjoin.native.enabled=false;
-explain vectorization expression
+explain vectorization detail
 select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 );
 select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 );
@@ -44,7 +45,7 @@ select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left out
 set hive.vectorized.execution.enabled=true;
 set hive.mapjoin.hybridgrace.hashtable=true;
 SET hive.vectorized.execution.mapjoin.native.enabled=false;
-explain vectorization expression
+explain vectorization detail
 select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 );
 select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 );
@@ -52,7 +53,7 @@ select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left out
 set hive.vectorized.execution.enabled=true;
 set hive.mapjoin.hybridgrace.hashtable=false;
 SET hive.vectorized.execution.mapjoin.native.enabled=true;
-explain vectorization expression
+explain vectorization detail
 select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 );
 select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 );
@@ -60,7 +61,7 @@ select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left out
 set hive.vectorized.execution.enabled=true;
 set hive.mapjoin.hybridgrace.hashtable=true;
 SET hive.vectorized.execution.mapjoin.native.enabled=true;
-explain vectorization expression
+explain vectorization detail
 select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 );
 select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 );
\ No newline at end of file
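vector_left_outer_join2.q standardizes every plan dump on explain vectorization detail. For reference, my reading of the EXPLAIN VECTORIZATION syntax these tests rely on (not part of the patch):

-- EXPLAIN VECTORIZATION [ONLY] [SUMMARY|OPERATOR|EXPRESSION|DETAIL]
-- Each level adds to the previous one; ONLY suppresses the non-vectorization
-- parts of the plan. DETAIL is the most verbose:
explain vectorization detail
select tjoin1.rnum from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 );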
diff --git ql/src/test/queries/clientpositive/vector_leftsemi_mapjoin.q ql/src/test/queries/clientpositive/vector_leftsemi_mapjoin.q
index 6ecfa1ad33..f9b4222a23 100644
--- ql/src/test/queries/clientpositive/vector_leftsemi_mapjoin.q
+++ ql/src/test/queries/clientpositive/vector_leftsemi_mapjoin.q
@@ -4,6 +4,7 @@ set hive.fetch.task.conversion=none;
 set hive.auto.convert.join=true;
 set hive.auto.convert.join.noconditionaltask=true;
 set hive.auto.convert.join.noconditionaltask.size=10000;
+SET hive.merge.nway.joins=false;
 
 -- SORT_QUERY_RESULTS
 
@@ -27,167 +28,190 @@ select * from t4_n19;
 set hive.vectorized.execution.enabled=false;
 set hive.mapjoin.hybridgrace.hashtable=false;
-explain vectorization only summary
-
+explain vectorization expression
 select * from t1_n148 a left semi join t2_n87 b on a.key=b.key sort by a.key, a.value;
 select * from t1_n148 a left semi join t2_n87 b on a.key=b.key sort by a.key, a.value;
 
-explain vectorization only summary
+explain vectorization expression
 select * from t2_n87 a left semi join t1_n148 b on b.key=a.key sort by a.key, a.value;
 select * from t2_n87 a left semi join t1_n148 b on b.key=a.key sort by a.key, a.value;
 
-explain vectorization only summary
+explain vectorization expression
 select * from t1_n148 a left semi join t4_n19 b on b.key=a.key sort by a.key, a.value;
 select * from t1_n148 a left semi join t4_n19 b on b.key=a.key sort by a.key, a.value;
 
-explain vectorization only summary
+explain vectorization expression
 select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value;
 select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value;
 
-explain vectorization only summary
+explain vectorization expression
 select * from t1_n148 a left semi join t2_n87 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value;
 select * from t1_n148 a left semi join t2_n87 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value;
 
-explain vectorization only summary
+explain vectorization expression
 select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value;
 select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value;
 
-explain vectorization only summary
+explain vectorization expression
 select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value ;
 select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value ;
 
-explain vectorization only summary
+explain vectorization expression
 select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value;
 select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value;
 
-explain vectorization only summary
+explain vectorization expression
 select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key;
 select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key;
 
-explain vectorization only summary
+explain vectorization expression
 select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value;
 select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value;
 
-explain vectorization only summary
+explain vectorization expression
 select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value;
 select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value;
 
-explain vectorization only summary
+explain vectorization expression
 select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value;
 select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value;
 
-explain vectorization only summary
+explain vectorization expression
 select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key;
 select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key;
 
-explain vectorization only summary
+explain vectorization expression
 select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 
-explain vectorization only summary
+SET hive.mapjoin.full.outer=false;
+explain vectorization expression
 select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 
-explain vectorization only summary
+SET hive.mapjoin.full.outer=true;
+explain vectorization expression
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
+
+-- Verify this works (FULL OUTER MapJoin is not enabled for N-way)
+SET hive.merge.nway.joins=true;
+explain vectorization expression
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
+SET hive.merge.nway.joins=false;
+
+explain vectorization expression
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key;
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key;
 
-explain vectorization only summary
-select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key;
-select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key;
+SET hive.mapjoin.full.outer=false;
+explain vectorization expression
+select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
+select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
 
-explain vectorization only summary
+SET hive.mapjoin.full.outer=true;
+explain vectorization expression
 select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
 select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
 
-explain vectorization only summary
+explain vectorization expression
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key;
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key;
 
-explain vectorization only summary
+explain vectorization expression
 select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100;
 select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100;
 
 set hive.vectorized.execution.enabled=false;
 set hive.mapjoin.hybridgrace.hashtable=true;
+set hive.llap.enable.grace.join.in.llap=true;
 
-explain vectorization summary
+explain vectorization operator
 select * from t1_n148 a left semi join t2_n87 b on a.key=b.key sort by a.key, a.value;
 select * from t1_n148 a left semi join t2_n87 b on a.key=b.key sort by a.key, a.value;
 
-explain vectorization summary
+explain vectorization operator
 select * from t2_n87 a left semi join t1_n148 b on b.key=a.key sort by a.key, a.value;
 select * from t2_n87 a left semi join t1_n148 b on b.key=a.key sort by a.key, a.value;
 
-explain vectorization summary
+explain vectorization operator
 select * from t1_n148 a left semi join t4_n19 b on b.key=a.key sort by a.key, a.value;
 select * from t1_n148 a left semi join t4_n19 b on b.key=a.key sort by a.key, a.value;
 
-explain vectorization summary
+explain vectorization operator
 select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value;
 select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value;
 
-explain vectorization summary
+explain vectorization operator
 select * from t1_n148 a left semi join t2_n87 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value;
 select * from t1_n148 a left semi join t2_n87 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value;
 
-explain vectorization summary
+explain vectorization operator
 select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value;
 select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value;
 
-explain vectorization summary
+explain vectorization operator
 select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value ;
 select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value ;
 
-explain vectorization summary
+explain vectorization operator
 select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value;
 select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value;
 
-explain vectorization summary
+explain vectorization operator
 select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key;
 select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key;
 
-explain vectorization summary
+explain vectorization operator
 select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value;
 select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value;
 
-explain vectorization summary
+explain vectorization operator
 select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value;
 select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value;
 
-explain vectorization summary
+explain vectorization operator
 select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value;
 select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value;
 
-explain vectorization summary
+explain vectorization operator
 select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key;
 select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key;
 
-explain vectorization summary
+explain vectorization operator
 select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 
-explain vectorization summary
+SET hive.mapjoin.full.outer=false;
+explain vectorization operator
 select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 
-explain vectorization summary
+SET hive.mapjoin.full.outer=true;
+explain vectorization operator
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
+
+explain vectorization operator
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key;
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key;
 
-explain vectorization summary
-select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key;
-select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key;
+SET hive.mapjoin.full.outer=false;
+explain vectorization operator
 select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
 select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
 
-explain vectorization summary
+SET hive.mapjoin.full.outer=true;
+explain vectorization operator
+select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
+-- select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
+
+explain vectorization operator
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key;
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key;
 
-explain vectorization summary
+explain vectorization operator
 select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100;
 select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100;
 
@@ -251,21 +275,29 @@ explain vectorization only operator
 select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 
-explain vectorization only operator
+SET hive.mapjoin.full.outer=false;
+explain vectorization operator
 select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 
+SET hive.mapjoin.full.outer=true;
+explain vectorization operator
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
+-- select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
+
 explain vectorization only operator
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key;
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key;
 
+SET hive.mapjoin.full.outer=false;
 explain vectorization only operator
-select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key;
-select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key;
+select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
+select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
 
+SET hive.mapjoin.full.outer=true;
 explain vectorization only operator
 select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
-select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
+-- select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
 
 explain vectorization only operator
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key;
@@ -277,85 +309,94 @@ select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.
 
 set hive.vectorized.execution.enabled=true;
 set hive.mapjoin.hybridgrace.hashtable=true;
+set hive.llap.enable.grace.join.in.llap=true;
 SET hive.vectorized.execution.mapjoin.native.enabled=false;
 
-explain vectorization detail
+explain vectorization operator
 select * from t1_n148 a left semi join t2_n87 b on a.key=b.key sort by a.key, a.value;
 select * from t1_n148 a left semi join t2_n87 b on a.key=b.key sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select * from t2_n87 a left semi join t1_n148 b on b.key=a.key sort by a.key, a.value;
 select * from t2_n87 a left semi join t1_n148 b on b.key=a.key sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select * from t1_n148 a left semi join t4_n19 b on b.key=a.key sort by a.key, a.value;
 select * from t1_n148 a left semi join t4_n19 b on b.key=a.key sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value;
 select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value;
 
-explain vectorization detail
+explain vectorization operator
 select * from t1_n148 a left semi join t2_n87 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value;
 select * from t1_n148 a left semi join t2_n87 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value;
 select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value;
 
-explain vectorization detail
+explain vectorization operator
 select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value ;
 select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value ;
 
-explain vectorization detail
+explain vectorization operator
 select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value;
 select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key;
 select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key;
 
-explain vectorization detail
+explain vectorization operator
 select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value;
 select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value;
 select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value;
 select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key;
 select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key;
 
-explain vectorization detail
+explain vectorization operator
 select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 
-explain vectorization detail
+SET hive.mapjoin.full.outer=false;
+explain vectorization operator
 select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 
-explain vectorization detail
+SET hive.mapjoin.full.outer=true;
+explain vectorization operator
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
+
+explain vectorization operator
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key;
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key;
 
-explain vectorization detail
-select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key;
-select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key;
+SET hive.mapjoin.full.outer=false;
+explain vectorization operator
+select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
+select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
 
-explain vectorization detail
+SET hive.mapjoin.full.outer=true;
+explain vectorization operator
 select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
 select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
 
-explain vectorization detail
+explain vectorization operator
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key;
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key;
 
-explain vectorization detail
+explain vectorization operator
 select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100;
 select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100;
 
@@ -363,166 +404,183 @@ set hive.vectorized.execution.enabled=true;
 set hive.mapjoin.hybridgrace.hashtable=false;
 SET hive.vectorized.execution.mapjoin.native.enabled=true;
 
-explain vectorization detail
+explain vectorization operator
 select * from t1_n148 a left semi join t2_n87 b on a.key=b.key sort by a.key, a.value;
 select * from t1_n148 a left semi join t2_n87 b on a.key=b.key sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select * from t2_n87 a left semi join t1_n148 b on b.key=a.key sort by a.key, a.value;
 select * from t2_n87 a left semi join t1_n148 b on b.key=a.key sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select * from t1_n148 a left semi join t4_n19 b on b.key=a.key sort by a.key, a.value;
 select * from t1_n148 a left semi join t4_n19 b on b.key=a.key sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value;
 select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value;
 
-explain vectorization detail
+explain vectorization operator
 select * from t1_n148 a left semi join t2_n87 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value;
 select * from t1_n148 a left semi join t2_n87 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value;
 select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value;
 
-explain vectorization detail
+explain vectorization operator
 select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value ;
 select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value ;
 
-explain vectorization detail
+explain vectorization operator
 select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value;
 select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key;
 select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key;
 
-explain vectorization detail
+explain vectorization operator
 select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value;
 select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value;
 select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value;
 select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key;
 select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key;
 
-explain vectorization detail
+explain vectorization operator
 select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 
-explain vectorization detail
+SET hive.mapjoin.full.outer=false;
+explain vectorization operator
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
+
+SET hive.mapjoin.full.outer=true;
+explain vectorization operator
 select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 
-explain vectorization detail
+explain vectorization operator
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key;
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key;
 
-explain vectorization detail
-select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key;
-select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key;
+SET hive.mapjoin.full.outer=false;
+explain vectorization operator
+select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
+select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
 
-explain vectorization detail
+SET hive.mapjoin.full.outer=true;
+explain vectorization operator
 select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
 select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
 
-explain vectorization detail
+explain vectorization operator
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key;
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key;
 
-explain vectorization detail
+explain vectorization operator
 select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100;
 select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100;
 
 set hive.vectorized.execution.enabled=true;
 set hive.mapjoin.hybridgrace.hashtable=true;
+set hive.llap.enable.grace.join.in.llap=true;
 SET hive.vectorized.execution.mapjoin.native.enabled=true;
 
-explain vectorization detail
+explain vectorization operator
 select * from t1_n148 a left semi join t2_n87 b on a.key=b.key sort by a.key, a.value;
 select * from t1_n148 a left semi join t2_n87 b on a.key=b.key sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select * from t2_n87 a left semi join t1_n148 b on b.key=a.key sort by a.key, a.value;
 select * from t2_n87 a left semi join t1_n148 b on b.key=a.key sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select * from t1_n148 a left semi join t4_n19 b on b.key=a.key sort by a.key, a.value;
 select * from t1_n148 a left semi join t4_n19 b on b.key=a.key sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value;
 select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value;
 
-explain vectorization detail
+explain vectorization operator
 select * from t1_n148 a left semi join t2_n87 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value;
 select * from t1_n148 a left semi join t2_n87 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value;
 select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value;
 
-explain vectorization detail
+explain vectorization operator
 select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value ;
 select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value ;
 
-explain vectorization detail
+explain vectorization operator
 select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value;
 select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key;
 select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key;
 
-explain vectorization detail
+explain vectorization operator
 select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value;
 select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value;
 select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value;
 select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value;
 
-explain vectorization detail
+explain vectorization operator
 select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key;
 select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key;
 
-explain vectorization detail
+explain vectorization operator
 select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 
-explain vectorization detail
+SET hive.mapjoin.full.outer=false;
+explain vectorization operator
 select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
 
-explain vectorization detail
+SET hive.mapjoin.full.outer=true;
+explain vectorization operator
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
+
+explain vectorization operator
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key;
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key;
 
-explain vectorization detail
-select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key;
-select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key;
+SET hive.mapjoin.full.outer=false;
+explain vectorization operator
+select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
+select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
 
-explain vectorization detail
+SET hive.mapjoin.full.outer=true;
+explain vectorization operator
 select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
 select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key;
 
-explain vectorization detail
+explain vectorization operator
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key;
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key;
 
-explain vectorization detail
+explain vectorization operator
 select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100;
 select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100;
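The vector_leftsemi_mapjoin.q changes above pin hive.merge.nway.joins=false up front and rerun every FULL OUTER query under both values of hive.mapjoin.full.outer; the patch's own comment notes that FULL OUTER MapJoin is not enabled for N-way joins. A condensed sketch of that probe, illustrative only and not part of the patch, assuming the t1_n148/t2_n87/t3_n35 test tables:

SET hive.mapjoin.full.outer=true;
SET hive.merge.nway.joins=true;
-- With N-way merging on, the shared-key joins below can merge into one
-- operator, which should fall off the FULL OUTER MapJoin path; compare plans.
explain vectorization operator
select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key;
SET hive.merge.nway.joins=false;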
diff --git ql/src/test/queries/clientpositive/vector_nullsafe_join.q ql/src/test/queries/clientpositive/vector_nullsafe_join.q
index 6a7ff72679..2d7155edd3 100644
--- ql/src/test/queries/clientpositive/vector_nullsafe_join.q
+++ ql/src/test/queries/clientpositive/vector_nullsafe_join.q
@@ -38,7 +38,11 @@ select * from myinput1 a join myinput1 b on a.key<=>b.value AND a.value<=>b.key
 -- outer joins
 SELECT * FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.key<=>b.value;
+
+EXPLAIN VECTORIZATION DETAIL DEBUG
 SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key<=>b.value;
+-- SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key<=>b.value;
+
 SELECT * FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.key<=>b.value;
 
 -- map joins
@@ -65,7 +69,11 @@ select * from myinput1 a join myinput1 b on a.key<=>b.value AND a.value<=>b.key
 -- outer joins
 SELECT * FROM myinput1 a LEFT OUTER JOIN myinput1 b ON a.key<=>b.value;
+
+EXPLAIN VECTORIZATION DETAIL DEBUG
 SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key<=>b.value;
+-- SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key<=>b.value;
+
 SELECT * FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.key<=>b.value;
 
 -- map joins
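vector_nullsafe_join.q adds the same probe, here at the DETAIL DEBUG level used elsewhere in this patch, around joins on '<=>'. The distinction the test exercises, illustrative only and not part of the patch, assuming the myinput1 table:

-- '<=>' is null-safe equality: NULL <=> NULL is true, so NULL keys can match.
SELECT * FROM myinput1 a JOIN myinput1 b ON a.key <=> b.value; -- NULLs pair up
SELECT * FROM myinput1 a JOIN myinput1 b ON a.key = b.value;   -- NULLs never match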
diff --git ql/src/test/queries/clientpositive/vectorized_join46.q ql/src/test/queries/clientpositive/vectorized_join46.q
index 145bc02073..5eda9f4c7d 100644
--- ql/src/test/queries/clientpositive/vectorized_join46.q
+++ ql/src/test/queries/clientpositive/vectorized_join46.q
@@ -1,3 +1,4 @@
+set hive.cli.print.header=true;
 set hive.vectorized.execution.enabled=true;
 set hive.auto.convert.join=true;
 set hive.strict.checks.cartesian.product=false;
@@ -15,7 +16,7 @@ INSERT INTO test2_n9 VALUES (102, 2, 'Del'), (103, 2, 'Ema'),
 
 -- Basic outer join
-EXPLAIN
+EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value);
@@ -25,7 +26,7 @@ FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value);
 
 -- Conjunction with pred on multiple inputs and single inputs (left outer join)
-EXPLAIN
+EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
@@ -39,7 +40,7 @@ ON (test1_n14.value=test2_n9.value
 AND test2_n9.key between 100 and 102);
 
 -- Conjunction with pred on single inputs (left outer join)
-EXPLAIN
+EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.key between 100 and 102
@@ -51,7 +52,7 @@ ON (test1_n14.key between 100 and 102
 AND test2_n9.key between 100 and 102);
 
 -- Conjunction with pred on multiple inputs and none (left outer join)
-EXPLAIN
+EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 RIGHT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value AND true);
@@ -61,7 +62,7 @@ FROM test1_n14 RIGHT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value AND true);
 
 -- Condition on one input (left outer join)
-EXPLAIN
+EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.key between 100 and 102);
@@ -71,7 +72,7 @@ FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.key between 100 and 102);
 
 -- Disjunction with pred on multiple inputs and single inputs (left outer join)
-EXPLAIN
+EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
@@ -85,7 +86,7 @@ ON (test1_n14.value=test2_n9.value
 OR test2_n9.key between 100 and 102);
 
 -- Disjunction with pred on multiple inputs and left input (left outer join)
-EXPLAIN
+EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
@@ -97,7 +98,7 @@ ON (test1_n14.value=test2_n9.value
 OR test1_n14.key between 100 and 102);
 
 -- Disjunction with pred on multiple inputs and right input (left outer join)
-EXPLAIN
+EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
@@ -109,7 +110,7 @@ ON (test1_n14.value=test2_n9.value
 OR test2_n9.key between 100 and 102);
 
 -- Keys plus residual (left outer join)
-EXPLAIN
+EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
@@ -123,7 +124,7 @@ ON (test1_n14.value=test2_n9.value
 OR test2_n9.key between 100 and 102));
 
 -- Disjunction with pred on multiple inputs and single inputs (right outer join)
-EXPLAIN
+EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 RIGHT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
@@ -137,7 +138,7 @@ ON (test1_n14.value=test2_n9.value
 OR test2_n9.key between 100 and 102);
 
 -- Disjunction with pred on multiple inputs and left input (right outer join)
-EXPLAIN
+EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 RIGHT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
@@ -149,7 +150,7 @@ ON (test1_n14.value=test2_n9.value
 OR test1_n14.key between 100 and 102);
 
 -- Disjunction with pred on multiple inputs and right input (right outer join)
-EXPLAIN
+EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 RIGHT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
@@ -161,7 +162,7 @@ ON (test1_n14.value=test2_n9.value
 OR test2_n9.key between 100 and 102);
 
 -- Keys plus residual (right outer join)
-EXPLAIN
+EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 RIGHT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
@@ -175,7 +176,7 @@ ON (test1_n14.value=test2_n9.value
 OR test2_n9.key between 100 and 102));
 
 -- Disjunction with pred on multiple inputs and single inputs (full outer join)
-EXPLAIN
+EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 FULL OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
@@ -189,7 +190,7 @@ ON (test1_n14.value=test2_n9.value
 OR test2_n9.key between 100 and 102);
 
 -- Disjunction with pred on multiple inputs and left input (full outer join)
-EXPLAIN
+EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 FULL OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
@@ -201,7 +202,7 @@ ON (test1_n14.value=test2_n9.value
 OR test1_n14.key between 100 and 102);
 
 -- Disjunction with pred on multiple inputs and right input (full outer join)
-EXPLAIN
+EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 FULL OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
@@ -213,7 +214,7 @@ ON (test1_n14.value=test2_n9.value
 OR test2_n9.key between 100 and 102);
 
 -- Keys plus residual (full outer join)
-EXPLAIN
+EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 FULL OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
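vectorized_join46.q swaps plain EXPLAIN for EXPLAIN VECTORIZATION OPERATOR across joins whose ON clauses are disjunctions. What that shape implies for the operator choice, illustrative only and not part of the patch, assuming the test1_n14/test2_n9 tables:

-- A disjunctive ON condition has no usable equality key, so the join cannot
-- hash on it; the whole predicate is evaluated as a residual over the pairing.
EXPLAIN VECTORIZATION OPERATOR
SELECT *
FROM test1_n14 LEFT OUTER JOIN test2_n9
ON (test1_n14.value=test2_n9.value OR test1_n14.key between 100 and 102);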
diff --git ql/src/test/queries/clientpositive/vectorized_join46_mr.q ql/src/test/queries/clientpositive/vectorized_join46_mr.q
new file mode 100644
index 0000000000..3d4627bd39
--- /dev/null
+++ ql/src/test/queries/clientpositive/vectorized_join46_mr.q
@@ -0,0 +1,228 @@
+set hive.cli.print.header=true;
+set hive.vectorized.execution.enabled=true;
+set hive.auto.convert.join=true;
+set hive.strict.checks.cartesian.product=false;
+set hive.join.emit.interval=2;
+
+-- SORT_QUERY_RESULTS
+
+CREATE TABLE test1 (key INT, value INT, col_1 STRING);
+INSERT INTO test1 VALUES (NULL, NULL, 'None'), (98, NULL, 'None'),
+    (99, 0, 'Alice'), (99, 2, 'Mat'), (100, 1, 'Bob'), (101, 2, 'Car');
+
+CREATE TABLE test2 (key INT, value INT, col_2 STRING);
+INSERT INTO test2 VALUES (102, 2, 'Del'), (103, 2, 'Ema'),
+    (104, 3, 'Fli'), (105, NULL, 'None');
+
+
+-- Basic outer join
+EXPLAIN VECTORIZATION OPERATOR
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value);
+
+-- Conjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN VECTORIZATION OPERATOR
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND test1.key between 100 and 102
+  AND test2.key between 100 and 102);
+
+-- Conjunction with pred on single inputs (left outer join)
+EXPLAIN VECTORIZATION OPERATOR
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102
+  AND test2.key between 100 and 102);
+
+-- Conjunction with pred on multiple inputs and none (left outer join)
+EXPLAIN VECTORIZATION OPERATOR
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true);
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value AND true);
+
+-- Condition on one input (left outer join)
+EXPLAIN VECTORIZATION OPERATOR
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and single inputs (left outer join)
+EXPLAIN VECTORIZATION OPERATOR
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and left input (left outer join)
+EXPLAIN VECTORIZATION OPERATOR
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and right input (left outer join)
+EXPLAIN VECTORIZATION OPERATOR
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+-- Keys plus residual (left outer join)
+EXPLAIN VECTORIZATION OPERATOR
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+-- Disjunction with pred on multiple inputs and single inputs (right outer join)
+EXPLAIN VECTORIZATION OPERATOR
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and left input (right outer join)
+EXPLAIN VECTORIZATION OPERATOR
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and right input (right outer join)
+EXPLAIN VECTORIZATION OPERATOR
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+-- Keys plus residual (right outer join)
+EXPLAIN VECTORIZATION OPERATOR
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+SELECT *
+FROM test1 RIGHT OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+-- Disjunction with pred on multiple inputs and single inputs (full outer join)
+EXPLAIN VECTORIZATION OPERATOR
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102
+  OR test2.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and left input (full outer join)
+EXPLAIN VECTORIZATION OPERATOR
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test1.key between 100 and 102);
+
+-- Disjunction with pred on multiple inputs and right input (full outer join)
+EXPLAIN VECTORIZATION OPERATOR
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  OR test2.key between 100 and 102);
+
+-- Keys plus residual (full outer join)
+EXPLAIN VECTORIZATION OPERATOR
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
+
+SELECT *
+FROM test1 FULL OUTER JOIN test2
+ON (test1.value=test2.value
+  AND (test1.key between 100 and 102
+    OR test2.key between 100 and 102));
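The new vectorized_join46_mr.q above replays the join46 matrix against unsuffixed test1/test2 tables. From here the patch moves into regenerated golden files; the acid_stats2.q.out block that follows records expected output for roughly this scenario (a condensed, illustrative restatement of the statements visible in that output, not part of the patch):

create table stats4(key int, value string) partitioned by (ds string)
  clustered by (value) into 2 buckets stored as orc
  tblproperties ("transactional"="true");
insert into table stats4 partition (ds) values (12341234, 'bob', 'today'),
  (123471234871239847, 'bob', 'today'), (431, 'tracy', 'tomorrow');
delete from stats4 where value = 'tracy' and ds = 'tomorrow';
desc formatted stats4 partition(ds='tomorrow'); -- numRows drops to 0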
col_name data_type comment +ds string + +# Detailed Table Information +Database: default +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + bucketing_version 2 + numFiles 2 + numPartitions 2 + numRows 3 + rawDataSize 0 + totalSize 1544 + transactional true + transactional_properties default +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.ql.io.orc.OrcSerde +InputFormat: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat +Compressed: No +Num Buckets: 2 +Bucket Columns: [value] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: desc formatted stats4 partition(ds='tomorrow') +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@stats4 +POSTHOOK: query: desc formatted stats4 partition(ds='tomorrow') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@stats4 +# col_name data_type comment +key int +value string + +# Partition Information +# col_name data_type comment +ds string + +# Detailed Partition Information +Partition Value: [tomorrow] +Database: default +Table: stats4 +#### A masked pattern was here #### +Partition Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}} + numFiles 1 + numRows 1 + rawDataSize 0 + totalSize 747 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.ql.io.orc.OrcSerde +InputFormat: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat +Compressed: No +Num Buckets: 2 +Bucket Columns: [value] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: desc formatted stats4 partition(ds='today') +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@stats4 +POSTHOOK: query: desc formatted stats4 partition(ds='today') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@stats4 +# col_name data_type comment +key int +value string + +# Partition Information +# col_name data_type comment +ds string + +# Detailed Partition Information +Partition Value: [today] +Database: default +Table: stats4 +#### A masked pattern was here #### +Partition Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}} + numFiles 1 + numRows 2 + rawDataSize 0 + totalSize 797 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.ql.io.orc.OrcSerde +InputFormat: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat +Compressed: No +Num Buckets: 2 +Bucket Columns: [value] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: explain select count(*) from stats4 +PREHOOK: type: QUERY +POSTHOOK: query: explain select count(*) from stats4 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: 1 + Processor Tree: + ListSink + +PREHOOK: query: select count(*) from stats4 +PREHOOK: type: QUERY +PREHOOK: Input: default@stats4 +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from stats4 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@stats4 +#### A masked pattern was here #### +3 +PREHOOK: query: delete from stats4 where value = 'tracy' and ds = 'tomorrow' 
+PREHOOK: type: QUERY +PREHOOK: Input: default@stats4 +PREHOOK: Input: default@stats4@ds=tomorrow +PREHOOK: Output: default@stats4@ds=tomorrow +POSTHOOK: query: delete from stats4 where value = 'tracy' and ds = 'tomorrow' +POSTHOOK: type: QUERY +POSTHOOK: Input: default@stats4 +POSTHOOK: Input: default@stats4@ds=tomorrow +POSTHOOK: Output: default@stats4@ds=tomorrow +PREHOOK: query: desc formatted stats4 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@stats4 +POSTHOOK: query: desc formatted stats4 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@stats4 +# col_name data_type comment +key int +value string + +# Partition Information +# col_name data_type comment +ds string + +# Detailed Table Information +Database: default +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + bucketing_version 2 + numFiles 3 + numPartitions 2 + numRows 2 + rawDataSize 0 + totalSize 2241 + transactional true + transactional_properties default +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.ql.io.orc.OrcSerde +InputFormat: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat +Compressed: No +Num Buckets: 2 +Bucket Columns: [value] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: desc formatted stats4 partition(ds='tomorrow') +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@stats4 +POSTHOOK: query: desc formatted stats4 partition(ds='tomorrow') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@stats4 +# col_name data_type comment +key int +value string + +# Partition Information +# col_name data_type comment +ds string + +# Detailed Partition Information +Partition Value: [tomorrow] +Database: default +Table: stats4 +#### A masked pattern was here #### +Partition Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 2 + numRows 0 + rawDataSize 0 + totalSize 1444 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.ql.io.orc.OrcSerde +InputFormat: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat +Compressed: No +Num Buckets: 2 +Bucket Columns: [value] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: desc formatted stats4 partition(ds='today') +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@stats4 +POSTHOOK: query: desc formatted stats4 partition(ds='today') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@stats4 +# col_name data_type comment +key int +value string + +# Partition Information +# col_name data_type comment +ds string + +# Detailed Partition Information +Partition Value: [today] +Database: default +Table: stats4 +#### A masked pattern was here #### +Partition Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}} + numFiles 1 + numRows 2 + rawDataSize 0 + totalSize 797 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.ql.io.orc.OrcSerde +InputFormat: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat +Compressed: No +Num Buckets: 2 +Bucket Columns: [value] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: explain select count(*) from stats4 
+PREHOOK: type: QUERY +POSTHOOK: query: explain select count(*) from stats4 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: 1 + Processor Tree: + ListSink + +PREHOOK: query: select count(*) from stats4 +PREHOOK: type: QUERY +PREHOOK: Input: default@stats4 +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from stats4 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@stats4 +#### A masked pattern was here #### +2 +PREHOOK: query: explain select count(*) from stats4 where ds = 'tomorrow' +PREHOOK: type: QUERY +POSTHOOK: query: explain select count(*) from stats4 where ds = 'tomorrow' +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: 1 + Processor Tree: + ListSink + +PREHOOK: query: select count(*) from stats4 where ds = 'tomorrow' +PREHOOK: type: QUERY +PREHOOK: Input: default@stats4 +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from stats4 where ds = 'tomorrow' +POSTHOOK: type: QUERY +POSTHOOK: Input: default@stats4 +#### A masked pattern was here #### +0 +PREHOOK: query: delete from stats4 where key > 12341234 and ds = 'today' +PREHOOK: type: QUERY +PREHOOK: Input: default@stats4 +PREHOOK: Input: default@stats4@ds=today +PREHOOK: Output: default@stats4@ds=today +POSTHOOK: query: delete from stats4 where key > 12341234 and ds = 'today' +POSTHOOK: type: QUERY +POSTHOOK: Input: default@stats4 +POSTHOOK: Input: default@stats4@ds=today +POSTHOOK: Output: default@stats4@ds=today +PREHOOK: query: desc formatted stats4 +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@stats4 +POSTHOOK: query: desc formatted stats4 +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@stats4 +# col_name data_type comment +key int +value string + +# Partition Information +# col_name data_type comment +ds string + +# Detailed Table Information +Database: default +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + bucketing_version 2 + numFiles 4 + numPartitions 2 + numRows 1 + rawDataSize 0 + totalSize 2937 + transactional true + transactional_properties default +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.ql.io.orc.OrcSerde +InputFormat: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat +Compressed: No +Num Buckets: 2 +Bucket Columns: [value] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: desc formatted stats4 partition(ds='tomorrow') +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@stats4 +POSTHOOK: query: desc formatted stats4 partition(ds='tomorrow') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@stats4 +# col_name data_type comment +key int +value string + +# Partition Information +# col_name data_type comment +ds string + +# Detailed Partition Information +Partition Value: [tomorrow] +Database: default +Table: stats4 +#### A masked pattern was here #### +Partition Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 2 + numRows 0 + rawDataSize 0 + totalSize 1444 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.ql.io.orc.OrcSerde +InputFormat: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat +OutputFormat: 
org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat +Compressed: No +Num Buckets: 2 +Bucket Columns: [value] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: desc formatted stats4 partition(ds='today') +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@stats4 +POSTHOOK: query: desc formatted stats4 partition(ds='today') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@stats4 +# col_name data_type comment +key int +value string + +# Partition Information +# col_name data_type comment +ds string + +# Detailed Partition Information +Partition Value: [today] +Database: default +Table: stats4 +#### A masked pattern was here #### +Partition Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 2 + numRows 1 + rawDataSize 0 + totalSize 1493 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.ql.io.orc.OrcSerde +InputFormat: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat +Compressed: No +Num Buckets: 2 +Bucket Columns: [value] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: explain select count(*) from stats4 +PREHOOK: type: QUERY +POSTHOOK: query: explain select count(*) from stats4 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: 1 + Processor Tree: + ListSink + +PREHOOK: query: select count(*) from stats4 +PREHOOK: type: QUERY +PREHOOK: Input: default@stats4 +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from stats4 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@stats4 +#### A masked pattern was here #### +1 +PREHOOK: query: explain select count(*) from stats4 where ds = 'tomorrow' +PREHOOK: type: QUERY +POSTHOOK: query: explain select count(*) from stats4 where ds = 'tomorrow' +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: 1 + Processor Tree: + ListSink + +PREHOOK: query: select count(*) from stats4 where ds = 'tomorrow' +PREHOOK: type: QUERY +PREHOOK: Input: default@stats4 +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from stats4 where ds = 'tomorrow' +POSTHOOK: type: QUERY +POSTHOOK: Input: default@stats4 +#### A masked pattern was here #### +0 +PREHOOK: query: drop table stats4 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@stats4 +PREHOOK: Output: default@stats4 +POSTHOOK: query: drop table stats4 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@stats4 +POSTHOOK: Output: default@stats4 diff --git ql/src/test/results/clientpositive/annotate_stats_join.q.out ql/src/test/results/clientpositive/annotate_stats_join.q.out index c2bf2e5f96..92b2fd8beb 100644 --- ql/src/test/results/clientpositive/annotate_stats_join.q.out +++ ql/src/test/results/clientpositive/annotate_stats_join.q.out @@ -893,7 +893,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string), _col1 (type: int) 1 _col1 (type: string), _col0 (type: int) diff --git ql/src/test/results/clientpositive/auto_join18.q.out ql/src/test/results/clientpositive/auto_join18.q.out index 7e8de946c2..5851e2d788 100644 --- ql/src/test/results/clientpositive/auto_join18.q.out +++ ql/src/test/results/clientpositive/auto_join18.q.out @@ -89,7 +89,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 
to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) diff --git ql/src/test/results/clientpositive/auto_join18_multi_distinct.q.out ql/src/test/results/clientpositive/auto_join18_multi_distinct.q.out index 9c0bffda30..b692193edc 100644 --- ql/src/test/results/clientpositive/auto_join18_multi_distinct.q.out +++ ql/src/test/results/clientpositive/auto_join18_multi_distinct.q.out @@ -91,7 +91,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) diff --git ql/src/test/results/clientpositive/auto_join6.q.out ql/src/test/results/clientpositive/auto_join6.q.out index a3e829fdda..578906edc7 100644 --- ql/src/test/results/clientpositive/auto_join6.q.out +++ ql/src/test/results/clientpositive/auto_join6.q.out @@ -83,7 +83,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) diff --git ql/src/test/results/clientpositive/auto_join7.q.out ql/src/test/results/clientpositive/auto_join7.q.out index 1f2616eddf..a094d2745f 100644 --- ql/src/test/results/clientpositive/auto_join7.q.out +++ ql/src/test/results/clientpositive/auto_join7.q.out @@ -110,7 +110,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 Left Outer Join 0 to 2 keys: 0 _col0 (type: string) diff --git ql/src/test/results/clientpositive/beeline/mapjoin2.q.out ql/src/test/results/clientpositive/beeline/mapjoin2.q.out index 6b85e13097..2288b4b29e 100644 --- ql/src/test/results/clientpositive/beeline/mapjoin2.q.out +++ ql/src/test/results/clientpositive/beeline/mapjoin2.q.out @@ -26,6 +26,85 @@ POSTHOOK: Input: _dummy_database@_dummy_table POSTHOOK: Output: default@tbl_n1 POSTHOOK: Lineage: tbl_n1.n SCRIPT [] POSTHOOK: Lineage: tbl_n1.t SCRIPT [] +Warning: Map Join MAPJOIN[13][bigTable=?] 
in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: explain +select a.n, a.t, isnull(b.n), isnull(b.t) from (select * from tbl_n1 where n = 1) a left outer join (select * from tbl_n1 where 1 = 2) b on a.n = b.n +PREHOOK: type: QUERY +POSTHOOK: query: explain +select a.n, a.t, isnull(b.n), isnull(b.t) from (select * from tbl_n1 where n = 1) a left outer join (select * from tbl_n1 where 1 = 2) b on a.n = b.n +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_1:tbl_n1 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_1:tbl_n1 + TableScan + alias: tbl_n1 + Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: false (type: boolean) + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: n (type: bigint), t (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + keys: + 0 + 1 + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: tbl_n1 + filterExpr: (n = 1L) (type: boolean) + Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (n = 1L) (type: boolean) + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: t (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Outer Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: 1L (type: bigint), _col0 (type: string), _col1 is null (type: boolean), _col2 is null (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + Warning: Map Join MAPJOIN[13][bigTable=?] in task 'Stage-3:MAPRED' is a cross product PREHOOK: query: select a.n, a.t, isnull(b.n), isnull(b.t) from (select * from tbl_n1 where n = 1) a left outer join (select * from tbl_n1 where 1 = 2) b on a.n = b.n PREHOOK: type: QUERY @@ -36,6 +115,91 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@tbl_n1 #### A masked pattern was here #### 1 one true true +Warning: Map Join MAPJOIN[13][bigTable=?] 
in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: explain +select isnull(a.n), isnull(a.t), b.n, b.t from (select * from tbl_n1 where 2 = 1) a right outer join (select * from tbl_n1 where n = 2) b on a.n = b.n +PREHOOK: type: QUERY +POSTHOOK: query: explain +select isnull(a.n), isnull(a.t), b.n, b.t from (select * from tbl_n1 where 2 = 1) a right outer join (select * from tbl_n1 where n = 2) b on a.n = b.n +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_0:tbl_n1 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_0:tbl_n1 + TableScan + alias: tbl_n1 + Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: false (type: boolean) + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: n (type: bigint), t (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + filter predicates: + 0 + 1 {true} + keys: + 0 + 1 + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: tbl_n1 + filterExpr: (n = 2L) (type: boolean) + Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (n = 2L) (type: boolean) + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: t (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Right Outer Join 0 to 1 + filter predicates: + 0 + 1 {true} + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 is null (type: boolean), _col1 is null (type: boolean), 2L (type: bigint), _col2 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + Warning: Map Join MAPJOIN[13][bigTable=?] 
in task 'Stage-3:MAPRED' is a cross product PREHOOK: query: select isnull(a.n), isnull(a.t), b.n, b.t from (select * from tbl_n1 where 2 = 1) a right outer join (select * from tbl_n1 where n = 2) b on a.n = b.n PREHOOK: type: QUERY @@ -46,6 +210,81 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@tbl_n1 #### A masked pattern was here #### true true 2 two +Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: explain +select isnull(a.n), isnull(a.t), isnull(b.n), isnull(b.t) from (select * from tbl_n1 where n = 1) a full outer join (select * from tbl_n1 where n = 2) b on a.n = b.n +PREHOOK: type: QUERY +POSTHOOK: query: explain +select isnull(a.n), isnull(a.t), isnull(b.n), isnull(b.t) from (select * from tbl_n1 where n = 1) a full outer join (select * from tbl_n1 where n = 2) b on a.n = b.n +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: tbl_n1 + filterExpr: (n = 1L) (type: boolean) + Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (n = 1L) (type: boolean) + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: 1L (type: bigint), t (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint), _col1 (type: string) + TableScan + alias: tbl_n1 + filterExpr: (n = 2L) (type: boolean) + Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (n = 2L) (type: boolean) + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: 2L (type: bigint), t (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint), _col1 (type: string) + Reduce Operator Tree: + Join Operator + condition map: + Full Outer Join 0 to 1 + filter predicates: + 0 + 1 {false} + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 is null (type: boolean), _col1 is null (type: boolean), _col2 is null (type: boolean), _col3 is null (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product PREHOOK: query: select isnull(a.n), isnull(a.t), isnull(b.n), isnull(b.t) from (select * from tbl_n1 where n = 1) a full outer join (select * 
from tbl_n1 where n = 2) b on a.n = b.n PREHOOK: type: QUERY @@ -57,6 +296,74 @@ POSTHOOK: Input: default@tbl_n1 #### A masked pattern was here #### false false true true true true false false +Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key +PREHOOK: type: QUERY +POSTHOOK: query: explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_0:_dummy_table + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_0:_dummy_table + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + keys: + 0 + 1 + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 + 1 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 11 (type: int), 1 (type: int), 1 (type: int), 0 (type: int), 0 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key PREHOOK: type: QUERY @@ -67,6 +374,77 @@ POSTHOOK: type: QUERY POSTHOOK: Input: _dummy_database@_dummy_table #### A masked pattern was here #### 11 1 1 0 0 +Warning: Map Join MAPJOIN[9][bigTable=?] 
in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a left outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key +PREHOOK: type: QUERY +POSTHOOK: query: explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a left outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_1:_dummy_table + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_1:_dummy_table + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 1 (type: int), 0 (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + keys: + 0 + 1 + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Left Outer Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col1, _col2 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 11 (type: int), 1 (type: int), _col1 (type: int), 0 (type: int), _col2 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a left outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key PREHOOK: type: QUERY @@ -77,6 +455,83 @@ POSTHOOK: type: QUERY POSTHOOK: Input: _dummy_database@_dummy_table #### A masked pattern was here #### 11 1 1 0 0 +Warning: Map Join MAPJOIN[9][bigTable=?] 
in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key +PREHOOK: type: QUERY +POSTHOOK: query: explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_0:_dummy_table + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_0:_dummy_table + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 11 (type: int), 1 (type: int), 0 (type: int) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + filter predicates: + 0 + 1 {true} + keys: + 0 + 1 + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Right Outer Join 0 to 1 + filter predicates: + 0 + 1 {true} + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: _col0 (type: int), _col1 (type: int), 1 (type: int), _col2 (type: int), 0 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key PREHOOK: type: QUERY @@ -87,6 +542,83 @@ POSTHOOK: type: QUERY POSTHOOK: Input: _dummy_database@_dummy_table #### A masked pattern was here #### 11 1 1 0 0 +Warning: Map Join MAPJOIN[9][bigTable=?] 
in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key +PREHOOK: type: QUERY +POSTHOOK: query: explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_0:_dummy_table + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_0:_dummy_table + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 11 (type: int), 1 (type: int), 0 (type: int) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + filter predicates: + 0 + 1 {true} + keys: + 0 + 1 + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Right Outer Join 0 to 1 + filter predicates: + 0 + 1 {true} + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: _col0 (type: int), _col1 (type: int), 1 (type: int), _col2 (type: int), 0 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a full outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/beeline/smb_mapjoin_1.q.out ql/src/test/results/clientpositive/beeline/smb_mapjoin_1.q.out index 733a3f5c18..122b13675f 100644 --- ql/src/test/results/clientpositive/beeline/smb_mapjoin_1.q.out +++ ql/src/test/results/clientpositive/beeline/smb_mapjoin_1.q.out @@ -274,7 +274,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 2060 Basic stats: COMPLETE Column stats: NONE Sorted Merge Bucket Map Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) @@ -496,7 +496,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 2080 Basic stats: COMPLETE Column stats: NONE Sorted 
Merge Bucket Map Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) diff --git ql/src/test/results/clientpositive/beeline/smb_mapjoin_13.q.out ql/src/test/results/clientpositive/beeline/smb_mapjoin_13.q.out index 123e84457a..81e34ee440 100644 --- ql/src/test/results/clientpositive/beeline/smb_mapjoin_13.q.out +++ ql/src/test/results/clientpositive/beeline/smb_mapjoin_13.q.out @@ -97,7 +97,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + tag: -1 TopN: 10 @@ -278,7 +278,7 @@ STAGE PLANS: Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/beeline/smb_mapjoin_2.q.out ql/src/test/results/clientpositive/beeline/smb_mapjoin_2.q.out index e39edcc2fd..480b12eb19 100644 --- ql/src/test/results/clientpositive/beeline/smb_mapjoin_2.q.out +++ ql/src/test/results/clientpositive/beeline/smb_mapjoin_2.q.out @@ -232,7 +232,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 2220 Basic stats: COMPLETE Column stats: NONE Sorted Merge Bucket Map Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) @@ -458,7 +458,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 2080 Basic stats: COMPLETE Column stats: NONE Sorted Merge Bucket Map Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) diff --git ql/src/test/results/clientpositive/beeline/smb_mapjoin_3.q.out ql/src/test/results/clientpositive/beeline/smb_mapjoin_3.q.out index fa7968da80..ba0a2ce723 100644 --- ql/src/test/results/clientpositive/beeline/smb_mapjoin_3.q.out +++ ql/src/test/results/clientpositive/beeline/smb_mapjoin_3.q.out @@ -231,7 +231,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 2220 Basic stats: COMPLETE Column stats: NONE Sorted Merge Bucket Map Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) @@ -455,7 +455,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 2060 Basic stats: COMPLETE Column stats: NONE Sorted Merge Bucket Map Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) diff --git ql/src/test/results/clientpositive/beeline/smb_mapjoin_7.q.out ql/src/test/results/clientpositive/beeline/smb_mapjoin_7.q.out index 4b1313dc93..16137dda45 100644 --- ql/src/test/results/clientpositive/beeline/smb_mapjoin_7.q.out +++ ql/src/test/results/clientpositive/beeline/smb_mapjoin_7.q.out @@ -633,7 +633,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Sorted Merge Bucket Map Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) diff --git ql/src/test/results/clientpositive/cbo_rp_join1.q.out ql/src/test/results/clientpositive/cbo_rp_join1.q.out index c5ec00dcc9..38cb34d063 100644 --- ql/src/test/results/clientpositive/cbo_rp_join1.q.out +++ ql/src/test/results/clientpositive/cbo_rp_join1.q.out @@ -53,7 +53,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer 
Join 0 to 1 filter predicates: 0 {(VALUE._col0 = 40)} 1 {(VALUE._col0 = 40)} @@ -156,7 +156,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 filter predicates: 0 {(VALUE._col0 = 40)} {(VALUE._col1 = 40)} 1 {(VALUE._col0 = 40)} @@ -259,7 +259,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 filter predicates: 0 {(VALUE._col0 = 40)} 1 {(VALUE._col0 = 40)} @@ -362,7 +362,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 filter predicates: 0 {(VALUE._col0 > 40)} {(VALUE._col1 > 50)} {(VALUE._col0 = VALUE._col1)} 1 {(VALUE._col0 > 40)} {(VALUE._col1 > 50)} {(VALUE._col0 = VALUE._col1)} diff --git ql/src/test/results/clientpositive/column_names_with_leading_and_trailing_spaces.q.out ql/src/test/results/clientpositive/column_names_with_leading_and_trailing_spaces.q.out index e83a370391..a9206f7070 100644 --- ql/src/test/results/clientpositive/column_names_with_leading_and_trailing_spaces.q.out +++ ql/src/test/results/clientpositive/column_names_with_leading_and_trailing_spaces.q.out @@ -119,5 +119,5 @@ POSTHOOK: query: select * from space order by ` left` POSTHOOK: type: QUERY POSTHOOK: Input: default@space #### A masked pattern was here #### -NULL 2 NULL 1 2 3 +NULL 2 NULL diff --git ql/src/test/results/clientpositive/correlated_join_keys.q.out ql/src/test/results/clientpositive/correlated_join_keys.q.out index 1e4c67ad8a..c0a77098b8 100644 --- ql/src/test/results/clientpositive/correlated_join_keys.q.out +++ ql/src/test/results/clientpositive/correlated_join_keys.q.out @@ -120,7 +120,7 @@ STAGE PLANS: keys: 0 _col0 (type: string), _col1 (type: string) 1 _col0 (type: string), _col1 (type: string) - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 5 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: count() mode: hash diff --git ql/src/test/results/clientpositive/correlationoptimizer14.q.out ql/src/test/results/clientpositive/correlationoptimizer14.q.out index cca6892662..e761caf537 100644 --- ql/src/test/results/clientpositive/correlationoptimizer14.q.out +++ ql/src/test/results/clientpositive/correlationoptimizer14.q.out @@ -1538,40 +1538,3 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 #### A masked pattern was here #### -128 val_128 128 1 -128 val_128 128 1 -128 val_128 128 1 -146 val_146 146 1 -146 val_146 146 1 -150 val_150 150 1 -213 val_213 213 1 -213 val_213 213 1 -224 val_224 224 1 -224 val_224 224 1 -238 val_238 238 1 -238 val_238 238 1 -255 val_255 255 1 -255 val_255 255 1 -273 val_273 273 1 -273 val_273 273 1 -273 val_273 273 1 -278 val_278 278 1 -278 val_278 278 1 -311 val_311 311 1 -311 val_311 311 1 -311 val_311 311 1 -369 val_369 369 1 -369 val_369 369 1 -369 val_369 369 1 -401 val_401 401 1 -401 val_401 401 1 -401 val_401 401 1 -401 val_401 401 1 -401 val_401 401 1 -406 val_406 406 1 -406 val_406 406 1 -406 val_406 406 1 -406 val_406 406 1 -66 val_66 66 1 -98 val_98 98 1 -98 val_98 98 1 diff --git ql/src/test/results/clientpositive/correlationoptimizer8.q.out ql/src/test/results/clientpositive/correlationoptimizer8.q.out index 102cc01ad3..c403941af8 100644 --- ql/src/test/results/clientpositive/correlationoptimizer8.q.out +++ ql/src/test/results/clientpositive/correlationoptimizer8.q.out @@ -1102,7 +1102,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator 
condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 UDFToDouble(_col0) (type: double) 1 UDFToDouble(_col0) (type: double) diff --git ql/src/test/results/clientpositive/create_view.q.out ql/src/test/results/clientpositive/create_view.q.out index d2c516fdfe..99d1543df9 100644 --- ql/src/test/results/clientpositive/create_view.q.out +++ ql/src/test/results/clientpositive/create_view.q.out @@ -1604,6 +1604,16 @@ POSTHOOK: Input: _dummy_database@_dummy_table POSTHOOK: Input: default@view18 #### A masked pattern was here #### 2 +PREHOOK: query: create view if not exists view18 as select v+1 from (select 1 as v) t +PREHOOK: type: CREATEVIEW +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: database:default +PREHOOK: Output: default@view18 +POSTHOOK: query: create view if not exists view18 as select v+1 from (select 1 as v) t +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: database:default +POSTHOOK: Output: default@view18 PREHOOK: query: DROP VIEW view1 PREHOOK: type: DROPVIEW PREHOOK: Input: default@view1 diff --git ql/src/test/results/clientpositive/ctas_colname.q.out ql/src/test/results/clientpositive/ctas_colname.q.out index ff1fd8d204..0c08845548 100644 --- ql/src/test/results/clientpositive/ctas_colname.q.out +++ ql/src/test/results/clientpositive/ctas_colname.q.out @@ -184,7 +184,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -350,7 +350,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/decimal_3.q.out ql/src/test/results/clientpositive/decimal_3.q.out index 3ded9a7b44..d2e39571fd 100644 --- ql/src/test/results/clientpositive/decimal_3.q.out +++ ql/src/test/results/clientpositive/decimal_3.q.out @@ -32,7 +32,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_3 ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_3 #### A masked pattern was here #### -NULL 0 -1234567890.123456789000000000 -1234567890 -4400.000000000000000000 4400 -1255.490000000000000000 -1255 @@ -70,6 +69,7 @@ NULL 0 125.200000000000000000 125 200.000000000000000000 200 1234567890.123456780000000000 1234567890 +NULL 0 PREHOOK: query: SELECT * FROM DECIMAL_3 ORDER BY key DESC, value DESC PREHOOK: type: QUERY PREHOOK: Input: default@decimal_3 @@ -124,7 +124,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_3 ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_3 #### A masked pattern was here #### -NULL 0 -1234567890.123456789000000000 -1234567890 -4400.000000000000000000 4400 -1255.490000000000000000 -1255 @@ -162,6 +161,7 @@ NULL 0 125.200000000000000000 125 200.000000000000000000 200 1234567890.123456780000000000 1234567890 +NULL 0 PREHOOK: query: SELECT DISTINCT key FROM DECIMAL_3 ORDER BY key PREHOOK: type: QUERY PREHOOK: Input: default@decimal_3 @@ -170,7 +170,6 @@ POSTHOOK: query: SELECT DISTINCT key FROM DECIMAL_3 ORDER BY key POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_3 #### A masked pattern was here #### -NULL -1234567890.123456789000000000 -4400.000000000000000000 -1255.490000000000000000 @@ -199,6 +198,7 @@ NULL 125.200000000000000000 200.000000000000000000 1234567890.123456780000000000 +NULL PREHOOK: query: SELECT key, 
sum(value) FROM DECIMAL_3 GROUP BY key ORDER BY key PREHOOK: type: QUERY PREHOOK: Input: default@decimal_3 @@ -207,7 +207,6 @@ POSTHOOK: query: SELECT key, sum(value) FROM DECIMAL_3 GROUP BY key ORDER BY key POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_3 #### A masked pattern was here #### -NULL 0 -1234567890.123456789000000000 -1234567890 -4400.000000000000000000 4400 -1255.490000000000000000 -1255 @@ -236,6 +235,7 @@ NULL 0 125.200000000000000000 125 200.000000000000000000 200 1234567890.123456780000000000 1234567890 +NULL 0 PREHOOK: query: SELECT value, sum(key) FROM DECIMAL_3 GROUP BY value ORDER BY value PREHOOK: type: QUERY PREHOOK: Input: default@decimal_3 diff --git ql/src/test/results/clientpositive/decimal_4.q.out ql/src/test/results/clientpositive/decimal_4.q.out index 8eb1de4256..9d3ee84f3b 100644 --- ql/src/test/results/clientpositive/decimal_4.q.out +++ ql/src/test/results/clientpositive/decimal_4.q.out @@ -56,7 +56,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_4_1 ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_4_1 #### A masked pattern was here #### -NULL 0 -1234567890.1234567890000000000000000 -1234567890 -4400.0000000000000000000000000 4400 -1255.4900000000000000000000000 -1255 @@ -94,6 +93,7 @@ NULL 0 125.2000000000000000000000000 125 200.0000000000000000000000000 200 1234567890.1234567800000000000000000 1234567890 +NULL 0 PREHOOK: query: SELECT * FROM DECIMAL_4_2 ORDER BY key PREHOOK: type: QUERY PREHOOK: Input: default@decimal_4_2 @@ -102,7 +102,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_4_2 ORDER BY key POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_4_2 #### A masked pattern was here #### -NULL NULL -1234567890.1234567890000000000000000 -3703703670.3703703670000000000000000 -4400.0000000000000000000000000 -13200.0000000000000000000000000 -1255.4900000000000000000000000 -3766.4700000000000000000000000 @@ -140,6 +139,7 @@ NULL NULL 125.2000000000000000000000000 375.6000000000000000000000000 200.0000000000000000000000000 600.0000000000000000000000000 1234567890.1234567800000000000000000 3703703670.3703703400000000000000000 +NULL NULL PREHOOK: query: DROP TABLE DECIMAL_4_1 PREHOOK: type: DROPTABLE PREHOOK: Input: default@decimal_4_1 diff --git ql/src/test/results/clientpositive/decimal_5.q.out ql/src/test/results/clientpositive/decimal_5.q.out index d94f5f2e32..f24588c5cb 100644 --- ql/src/test/results/clientpositive/decimal_5.q.out +++ ql/src/test/results/clientpositive/decimal_5.q.out @@ -32,9 +32,6 @@ POSTHOOK: query: SELECT key FROM DECIMAL_5_n0 ORDER BY key POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_5_n0 #### A masked pattern was here #### -NULL -NULL -NULL -4400.00000 -1255.49000 -1.12200 @@ -70,6 +67,9 @@ NULL 124.00000 125.20000 200.00000 +NULL +NULL +NULL PREHOOK: query: SELECT DISTINCT key FROM DECIMAL_5_n0 ORDER BY key PREHOOK: type: QUERY PREHOOK: Input: default@decimal_5_n0 @@ -78,7 +78,6 @@ POSTHOOK: query: SELECT DISTINCT key FROM DECIMAL_5_n0 ORDER BY key POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_5_n0 #### A masked pattern was here #### -NULL -4400.00000 -1255.49000 -1.12200 @@ -105,6 +104,7 @@ NULL 124.00000 125.20000 200.00000 +NULL PREHOOK: query: SELECT cast(key as decimal) FROM DECIMAL_5_n0 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_5_n0 diff --git ql/src/test/results/clientpositive/decimal_6.q.out ql/src/test/results/clientpositive/decimal_6.q.out index 1959dd9f37..83cadcef8d 100644 --- ql/src/test/results/clientpositive/decimal_6.q.out +++ 
ql/src/test/results/clientpositive/decimal_6.q.out @@ -72,12 +72,6 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_6_1_n0 POSTHOOK: Input: default@decimal_6_2_n0 #### A masked pattern was here #### -NULL -NULL -NULL -NULL -NULL -NULL -1234567890.12350 -4400.00000 -4400.00000 @@ -126,6 +120,12 @@ NULL 2389432.23750 2389432.23750 1234567890.12350 +NULL +NULL +NULL +NULL +NULL +NULL PREHOOK: query: CREATE TABLE DECIMAL_6_3_n0 AS SELECT key + 5.5 AS k, value * 11 AS v from DECIMAL_6_1_n0 ORDER BY v PREHOOK: type: CREATETABLE_AS_SELECT PREHOOK: Input: default@decimal_6_1_n0 diff --git ql/src/test/results/clientpositive/decimal_precision.q.out ql/src/test/results/clientpositive/decimal_precision.q.out index 921d86bff9..fb59c4ff1e 100644 --- ql/src/test/results/clientpositive/decimal_precision.q.out +++ ql/src/test/results/clientpositive/decimal_precision.q.out @@ -32,6 +32,37 @@ POSTHOOK: query: SELECT * FROM DECIMAL_PRECISION_n0 ORDER BY `dec` POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_precision_n0 #### A masked pattern was here #### +0.0000000000 +0.0000000000 +0.0000000000 +0.0000000000 +0.0000000000 +0.1234567890 +0.1234567890 +1.2345678901 +1.2345678901 +1.2345678901 +12.3456789012 +12.3456789012 +12.3456789012 +123.4567890123 +123.4567890123 +123.4567890123 +1234.5678901235 +1234.5678901235 +1234.5678901235 +12345.6789012346 +12345.6789012346 +123456.7890123456 +123456.7890123457 +1234567.8901234560 +1234567.8901234568 +12345678.9012345600 +12345678.9012345679 +123456789.0123456000 +123456789.0123456789 +1234567890.1234560000 +1234567890.1234567890 NULL NULL NULL @@ -76,37 +107,6 @@ NULL NULL NULL NULL -0.0000000000 -0.0000000000 -0.0000000000 -0.0000000000 -0.0000000000 -0.1234567890 -0.1234567890 -1.2345678901 -1.2345678901 -1.2345678901 -12.3456789012 -12.3456789012 -12.3456789012 -123.4567890123 -123.4567890123 -123.4567890123 -1234.5678901235 -1234.5678901235 -1234.5678901235 -12345.6789012346 -12345.6789012346 -123456.7890123456 -123456.7890123457 -1234567.8901234560 -1234567.8901234568 -12345678.9012345600 -12345678.9012345679 -123456789.0123456000 -123456789.0123456789 -1234567890.1234560000 -1234567890.1234567890 PREHOOK: query: SELECT `dec`, `dec` + 1, `dec` - 1 FROM DECIMAL_PRECISION_n0 ORDER BY `dec` PREHOOK: type: QUERY PREHOOK: Input: default@decimal_precision_n0 @@ -115,50 +115,6 @@ POSTHOOK: query: SELECT `dec`, `dec` + 1, `dec` - 1 FROM DECIMAL_PRECISION_n0 OR POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_precision_n0 #### A masked pattern was here #### -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL 0.0000000000 1.0000000000 -1.0000000000 0.0000000000 1.0000000000 -1.0000000000 0.0000000000 1.0000000000 -1.0000000000 @@ -190,14 +146,6 @@ NULL NULL NULL 123456789.0123456789 123456790.0123456789 123456788.0123456789 1234567890.1234560000 1234567891.1234560000 1234567889.1234560000 
1234567890.1234567890 1234567891.1234567890 1234567889.1234567890 -PREHOOK: query: SELECT `dec`, `dec` * 2, `dec` / 3 FROM DECIMAL_PRECISION_n0 ORDER BY `dec` -PREHOOK: type: QUERY -PREHOOK: Input: default@decimal_precision_n0 -#### A masked pattern was here #### -POSTHOOK: query: SELECT `dec`, `dec` * 2, `dec` / 3 FROM DECIMAL_PRECISION_n0 ORDER BY `dec` -POSTHOOK: type: QUERY -POSTHOOK: Input: default@decimal_precision_n0 -#### A masked pattern was here #### NULL NULL NULL NULL NULL NULL NULL NULL NULL @@ -242,6 +190,14 @@ NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: SELECT `dec`, `dec` * 2, `dec` / 3 FROM DECIMAL_PRECISION_n0 ORDER BY `dec` +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision_n0 +#### A masked pattern was here #### +POSTHOOK: query: SELECT `dec`, `dec` * 2, `dec` / 3 FROM DECIMAL_PRECISION_n0 ORDER BY `dec` +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision_n0 +#### A masked pattern was here #### 0.0000000000 0.0000000000 0.000000000000 0.0000000000 0.0000000000 0.000000000000 0.0000000000 0.0000000000 0.000000000000 @@ -273,6 +229,50 @@ NULL NULL NULL 123456789.0123456789 246913578.0246913578 41152263.004115226300 1234567890.1234560000 2469135780.2469120000 411522630.041152000000 1234567890.1234567890 2469135780.2469135780 411522630.041152263000 +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL PREHOOK: query: SELECT `dec`, `dec` / 9 FROM DECIMAL_PRECISION_n0 ORDER BY `dec` PREHOOK: type: QUERY PREHOOK: Input: default@decimal_precision_n0 @@ -281,50 +281,6 @@ POSTHOOK: query: SELECT `dec`, `dec` / 9 FROM DECIMAL_PRECISION_n0 ORDER BY `dec POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_precision_n0 #### A masked pattern was here #### -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL 0.0000000000 0.000000000000 0.0000000000 0.000000000000 0.0000000000 0.000000000000 @@ -356,14 +312,6 @@ NULL NULL 123456789.0123456789 13717421.001371742100 1234567890.1234560000 137174210.013717333333 1234567890.1234567890 137174210.013717421000 -PREHOOK: query: SELECT `dec`, `dec` / 27 FROM DECIMAL_PRECISION_n0 ORDER BY `dec` -PREHOOK: type: QUERY -PREHOOK: Input: default@decimal_precision_n0 -#### A masked pattern was here #### -POSTHOOK: query: SELECT `dec`, `dec` / 27 FROM DECIMAL_PRECISION_n0 ORDER BY `dec` -POSTHOOK: type: QUERY -POSTHOOK: Input: default@decimal_precision_n0 -#### A masked pattern was here #### NULL NULL NULL NULL NULL NULL @@ -408,6 
+356,14 @@ NULL NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: SELECT `dec`, `dec` / 27 FROM DECIMAL_PRECISION_n0 ORDER BY `dec` +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision_n0 +#### A masked pattern was here #### +POSTHOOK: query: SELECT `dec`, `dec` / 27 FROM DECIMAL_PRECISION_n0 ORDER BY `dec` +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision_n0 +#### A masked pattern was here #### 0.0000000000 0.0000000000000 0.0000000000 0.0000000000000 0.0000000000 0.0000000000000 @@ -439,14 +395,6 @@ NULL NULL 123456789.0123456789 4572473.6671239140333 1234567890.1234560000 45724736.6712391111111 1234567890.1234567890 45724736.6712391403333 -PREHOOK: query: SELECT `dec`, `dec` * `dec` FROM DECIMAL_PRECISION_n0 ORDER BY `dec` -PREHOOK: type: QUERY -PREHOOK: Input: default@decimal_precision_n0 -#### A masked pattern was here #### -POSTHOOK: query: SELECT `dec`, `dec` * `dec` FROM DECIMAL_PRECISION_n0 ORDER BY `dec` -POSTHOOK: type: QUERY -POSTHOOK: Input: default@decimal_precision_n0 -#### A masked pattern was here #### NULL NULL NULL NULL NULL NULL @@ -491,6 +439,14 @@ NULL NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: SELECT `dec`, `dec` * `dec` FROM DECIMAL_PRECISION_n0 ORDER BY `dec` +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision_n0 +#### A masked pattern was here #### +POSTHOOK: query: SELECT `dec`, `dec` * `dec` FROM DECIMAL_PRECISION_n0 ORDER BY `dec` +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision_n0 +#### A masked pattern was here #### 0.0000000000 0.00000000000000000 0.0000000000 0.00000000000000000 0.0000000000 0.00000000000000000 @@ -522,6 +478,50 @@ NULL NULL 123456789.0123456789 15241578753238836.75019051998750191 1234567890.1234560000 1524157875323881726.87092138393600000 1234567890.1234567890 1524157875323883675.01905199875019052 +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL PREHOOK: query: EXPLAIN SELECT avg(`dec`), sum(`dec`) FROM DECIMAL_PRECISION_n0 PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT avg(`dec`), sum(`dec`) FROM DECIMAL_PRECISION_n0 diff --git ql/src/test/results/clientpositive/decimal_serde.q.out ql/src/test/results/clientpositive/decimal_serde.q.out index fb7432458a..ec90dd3a2f 100644 --- ql/src/test/results/clientpositive/decimal_serde.q.out +++ ql/src/test/results/clientpositive/decimal_serde.q.out @@ -44,7 +44,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_TEXT ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_text #### A masked pattern was here #### -NULL 0 -1234567890 -1234567890 -4400 4400 -1255 -1255 @@ -82,6 +81,7 @@ NULL 0 125 125 200 200 1234567890 1234567890 +NULL 0 PREHOOK: query: CREATE TABLE DECIMAL_RC STORED AS RCFile AS SELECT * FROM DECIMAL_TEXT @@ -218,7 +218,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_SEQUENCE ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_sequence #### A masked pattern was here #### -NULL 0 -1234567890 -1234567890 -4400 4400 -1255 -1255 @@ -256,6 +255,7 @@ NULL 0 125 125 200 200 1234567890 1234567890 +NULL 0 PREHOOK: query: DROP TABLE IF EXISTS DECIMAL_TEXT 
PREHOOK: type: DROPTABLE PREHOOK: Input: default@decimal_text diff --git ql/src/test/results/clientpositive/delete_all_partitioned.q.out ql/src/test/results/clientpositive/delete_all_partitioned.q.out index 90f8753687..4c1a024683 100644 --- ql/src/test/results/clientpositive/delete_all_partitioned.q.out +++ ql/src/test/results/clientpositive/delete_all_partitioned.q.out @@ -48,7 +48,6 @@ POSTHOOK: Input: default@acid_dap@ds=tomorrow -1070883071 0ruyd6Y50JpdGRf6HqD today -1070551679 iUR3Q today -1069736047 k17Am8uPHWk02cEf1jet today -6981 NULL tomorrow 6981 1FNNhmiFLGw425NA13g tomorrow 6981 4KhrrQ0nJ7bMNTvhSCA tomorrow 6981 K630vaVf tomorrow @@ -58,6 +57,7 @@ POSTHOOK: Input: default@acid_dap@ds=tomorrow 6981 o4lvY20511w0EOX3P3I82p63 tomorrow 6981 o5mb0QP5Y48Qd4vdB0 tomorrow 6981 sF2CRfgt2K tomorrow +6981 NULL tomorrow PREHOOK: query: delete from acid_dap PREHOOK: type: QUERY PREHOOK: Input: default@acid_dap diff --git ql/src/test/results/clientpositive/distinct_windowing.q.out ql/src/test/results/clientpositive/distinct_windowing.q.out index 39b87e8b87..b32786ea03 100644 --- ql/src/test/results/clientpositive/distinct_windowing.q.out +++ ql/src/test/results/clientpositive/distinct_windowing.q.out @@ -83,7 +83,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -202,7 +202,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -326,7 +326,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST partition by: _col1 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/distinct_windowing_no_cbo.q.out ql/src/test/results/clientpositive/distinct_windowing_no_cbo.q.out index c1fe3a2ece..6196b28288 100644 --- ql/src/test/results/clientpositive/distinct_windowing_no_cbo.q.out +++ ql/src/test/results/clientpositive/distinct_windowing_no_cbo.q.out @@ -83,7 +83,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -202,7 +202,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -326,7 +326,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -539,7 +539,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -680,7 +680,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST partition by: _col1 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/druid/druidkafkamini_basic.q.out ql/src/test/results/clientpositive/druid/druidkafkamini_basic.q.out index 619b95e23a..2d96d73ad0 
100644 --- ql/src/test/results/clientpositive/druid/druidkafkamini_basic.q.out +++ ql/src/test/results/clientpositive/druid/druidkafkamini_basic.q.out @@ -253,6 +253,8 @@ STAGE PLANS: tag: 0 value expressions: _col0 (type: string) auto parallelism: true + Execution mode: llap + LLAP IO: no inputs Path -> Alias: hdfs://### HDFS PATH ### [languages] Path -> Partition: @@ -328,6 +330,8 @@ STAGE PLANS: tag: 1 value expressions: user (type: string) auto parallelism: true + Execution mode: llap + LLAP IO: no inputs Path -> Alias: hdfs://### HDFS PATH ### [druid_kafka_test] Path -> Partition: @@ -420,6 +424,7 @@ STAGE PLANS: Truncated Path -> Alias: /druid_kafka_test [druid_kafka_test] Reducer 2 + Execution mode: llap Needs Tagging: false Reduce Operator Tree: Merge Join Operator diff --git ql/src/test/results/clientpositive/druid/druidmini_extractTime.q.out ql/src/test/results/clientpositive/druid/druidmini_extractTime.q.out index 6090472a6a..284c80d9cf 100644 --- ql/src/test/results/clientpositive/druid/druidmini_extractTime.q.out +++ ql/src/test/results/clientpositive/druid/druidmini_extractTime.q.out @@ -607,7 +607,7 @@ STAGE PLANS: properties: druid.fieldNames vc,vc0 druid.fieldTypes int,string - druid.query.json {"queryType":"scan","dataSource":"default.druid_table_alltypesorc","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"and","fields":[{"type":"bound","dimension":"__time","lower":"1969","lowerStrict":false,"ordering":"numeric","extractionFn":{"type":"timeFormat","format":"yyyy","timeZone":"US/Pacific","locale":"en-US"}},{"type":"expression","expression":"(CAST(timestamp_extract(\"__time\",'YEAR','US/Pacific'), 'STRING') == '1969')"}]},"virtualColumns":[{"type":"expression","name":"vc","expression":"timestamp_extract(\"__time\",'YEAR','US/Pacific')","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"substring(timestamp_format(timestamp_floor(timestamp_parse(timestamp_format(\"__time\",'yyyy-MM-dd\\u0027T\\u0027HH:mm:ss.SSS\\u0027Z\\u0027','US/Pacific'),'','UTC'),'P1D','','UTC'),'yyyy-MM-dd','UTC'), 0, 4)","outputType":"STRING"}],"columns":["vc","vc0"],"resultFormat":"compactedList","limit":1} + druid.query.json {"queryType":"scan","dataSource":"default.druid_table_alltypesorc","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"and","fields":[{"type":"bound","dimension":"__time","lower":"1969-01-01T08:00:00.000Z","lowerStrict":false,"ordering":"lexicographic","extractionFn":{"type":"timeFormat","format":"yyyy-MM-dd'T'HH:mm:ss.SSS'Z'","timeZone":"UTC"}},{"type":"expression","expression":"(CAST(timestamp_extract(\"__time\",'YEAR','US/Pacific'), 'STRING') == '1969')"}]},"virtualColumns":[{"type":"expression","name":"vc","expression":"timestamp_extract(\"__time\",'YEAR','US/Pacific')","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"substring(timestamp_format(timestamp_floor(timestamp_parse(timestamp_format(\"__time\",'yyyy-MM-dd\\u0027T\\u0027HH:mm:ss.SSS\\u0027Z\\u0027','US/Pacific'),'','UTC'),'P1D','','UTC'),'yyyy-MM-dd','UTC'), 0, 4)","outputType":"STRING"}],"columns":["vc","vc0"],"resultFormat":"compactedList","limit":1} druid.query.type scan Select Operator expressions: vc (type: int), vc0 (type: string) diff --git ql/src/test/results/clientpositive/druid/druidmini_floorTime.q.out ql/src/test/results/clientpositive/druid/druidmini_floorTime.q.out index 21b5bdf642..69184633f8 100644 --- ql/src/test/results/clientpositive/druid/druidmini_floorTime.q.out +++ 
ql/src/test/results/clientpositive/druid/druidmini_floorTime.q.out @@ -608,7 +608,7 @@ STAGE PLANS: properties: druid.fieldNames vc,vc0 druid.fieldTypes int,string - druid.query.json {"queryType":"scan","dataSource":"default.druid_table_alltypesorc","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"and","fields":[{"type":"bound","dimension":"__time","lower":"1969","lowerStrict":false,"ordering":"numeric","extractionFn":{"type":"timeFormat","format":"yyyy","timeZone":"US/Pacific","locale":"en-US"}},{"type":"expression","expression":"(CAST(timestamp_extract(\"__time\",'YEAR','US/Pacific'), 'STRING') == '1969')"}]},"virtualColumns":[{"type":"expression","name":"vc","expression":"timestamp_extract(\"__time\",'YEAR','US/Pacific')","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"substring(timestamp_format(timestamp_floor(timestamp_parse(timestamp_format(\"__time\",'yyyy-MM-dd\\u0027T\\u0027HH:mm:ss.SSS\\u0027Z\\u0027','US/Pacific'),'','UTC'),'P1D','','UTC'),'yyyy-MM-dd','UTC'), 0, 4)","outputType":"STRING"}],"columns":["vc","vc0"],"resultFormat":"compactedList","limit":1} + druid.query.json {"queryType":"scan","dataSource":"default.druid_table_alltypesorc","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"and","fields":[{"type":"bound","dimension":"__time","lower":"1969-01-01T08:00:00.000Z","lowerStrict":false,"ordering":"lexicographic","extractionFn":{"type":"timeFormat","format":"yyyy-MM-dd'T'HH:mm:ss.SSS'Z'","timeZone":"UTC"}},{"type":"expression","expression":"(CAST(timestamp_extract(\"__time\",'YEAR','US/Pacific'), 'STRING') == '1969')"}]},"virtualColumns":[{"type":"expression","name":"vc","expression":"timestamp_extract(\"__time\",'YEAR','US/Pacific')","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"substring(timestamp_format(timestamp_floor(timestamp_parse(timestamp_format(\"__time\",'yyyy-MM-dd\\u0027T\\u0027HH:mm:ss.SSS\\u0027Z\\u0027','US/Pacific'),'','UTC'),'P1D','','UTC'),'yyyy-MM-dd','UTC'), 0, 4)","outputType":"STRING"}],"columns":["vc","vc0"],"resultFormat":"compactedList","limit":1} druid.query.type scan Select Operator expressions: vc (type: int), vc0 (type: string) diff --git ql/src/test/results/clientpositive/fullouter_mapjoin_1.q.out ql/src/test/results/clientpositive/fullouter_mapjoin_1.q.out new file mode 100644 index 0000000000..0ae9df92f2 --- /dev/null +++ ql/src/test/results/clientpositive/fullouter_mapjoin_1.q.out @@ -0,0 +1,176 @@ +PREHOOK: query: CREATE TABLE fullouter_long_big_1a(key bigint) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1a +POSTHOOK: query: CREATE TABLE fullouter_long_big_1a(key bigint) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1a +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a.txt' OVERWRITE INTO TABLE fullouter_long_big_1a +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_big_1a +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a.txt' OVERWRITE INTO TABLE fullouter_long_big_1a +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_big_1a +PREHOOK: query: CREATE TABLE fullouter_long_small_1a(key bigint, s_date date) +row format delimited fields terminated by ',' 
+PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1a +POSTHOOK: query: CREATE TABLE fullouter_long_small_1a(key bigint, s_date date) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1a +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a.txt' OVERWRITE INTO TABLE fullouter_long_small_1a +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_small_1a +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a.txt' OVERWRITE INTO TABLE fullouter_long_small_1a +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_small_1a +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 1 Data size: 205 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: bigint) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 205 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Map-reduce partition columns: _col0 (type: bigint) + Statistics: Num rows: 1 Data size: 205 Basic stats: COMPLETE Column stats: NONE + TableScan + alias: s + Statistics: Num rows: 1 Data size: 1640 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: bigint), s_date (type: date) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 1640 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Map-reduce partition columns: _col0 (type: bigint) + Statistics: Num rows: 1 Data size: 1640 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: date) + Reduce Operator Tree: + Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col0 (type: bigint) + 1 _col0 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 1 Data size: 225 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 225 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a +PREHOOK: Input: default@fullouter_long_small_1a +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a 
b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a +POSTHOOK: Input: default@fullouter_long_small_1a +#### A masked pattern was here #### +-5206670856103795573 NULL NULL +-5310365297525168078 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-8460550397108077433 NULL NULL +1569543799237464101 NULL NULL +3313583664488247651 NULL NULL +968819023021777205 NULL NULL +NULL -1339636982994067311 2000-06-20 +NULL -1339636982994067311 2008-12-03 +NULL -2098090254092150988 1817-03-12 +NULL -2098090254092150988 2163-05-26 +NULL -2098090254092150988 2219-12-23 +NULL -2184423060953067642 1853-07-06 +NULL -2184423060953067642 1880-10-06 +NULL -2575185053386712613 1809-07-12 +NULL -2575185053386712613 2105-01-21 +NULL -2688622006344936758 1948-10-15 +NULL -2688622006344936758 2129-01-11 +NULL -327698348664467755 2222-10-15 +NULL -3655445881497026796 2108-08-16 +NULL -4224290881682877258 1813-05-17 +NULL -4224290881682877258 2120-01-16 +NULL -4224290881682877258 2185-07-08 +NULL -4961171400048338491 2196-08-10 +NULL -5706981533666803767 1800-09-20 +NULL -5706981533666803767 2151-06-09 +NULL -5754527700632192146 1958-07-15 +NULL -614848861623872247 2101-05-25 +NULL -614848861623872247 2112-11-09 +NULL -6784441713807772877 1845-02-16 +NULL -6784441713807772877 2054-06-17 +NULL -7707546703881534780 2134-08-20 +NULL 214451696109242839 1855-05-12 +NULL 214451696109242839 1977-01-04 +NULL 214451696109242839 2179-04-18 +NULL 2438535236662373438 1881-09-16 +NULL 2438535236662373438 1916-01-10 +NULL 2438535236662373438 2026-06-23 +NULL 3845554233155411208 1805-11-10 +NULL 3845554233155411208 2264-04-05 +NULL 3873405809071478736 1918-11-20 +NULL 3873405809071478736 2034-06-09 +NULL 3873405809071478736 2164-04-23 +NULL 3905351789241845882 1866-07-28 +NULL 3905351789241845882 2045-12-05 +NULL 434940853096155515 2275-02-08 +NULL 4436884039838843341 2031-05-23 +NULL 5246983111579595707 1817-07-01 +NULL 5246983111579595707 2260-05-11 +NULL 5252407779338300447 2039-03-10 +NULL 5252407779338300447 2042-04-26 +NULL 6049335087268933751 2086-12-17 +NULL 6049335087268933751 2282-06-09 +NULL 7297177530102477725 1921-05-11 +NULL 7297177530102477725 1926-04-12 +NULL 7297177530102477725 2125-08-26 +NULL 7937120928560087303 2083-03-14 +NULL 8755921538765428593 1827-05-01 +NULL NULL 2024-01-23 +NULL NULL 2098-02-10 +NULL NULL 2242-02-08 diff --git ql/src/test/results/clientpositive/groupby_grouping_window.q.out ql/src/test/results/clientpositive/groupby_grouping_window.q.out index f50b8dc662..63a9d0cb6d 100644 --- ql/src/test/results/clientpositive/groupby_grouping_window.q.out +++ ql/src/test/results/clientpositive/groupby_grouping_window.q.out @@ -103,7 +103,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col3 ASC NULLS FIRST + order by: _col3 ASC NULLS LAST partition by: _col0 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/infer_join_preds.q.out ql/src/test/results/clientpositive/infer_join_preds.q.out index 57f651168c..eefc68ba7f 100644 --- ql/src/test/results/clientpositive/infer_join_preds.q.out +++ ql/src/test/results/clientpositive/infer_join_preds.q.out @@ -460,7 +460,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) diff --git 
ql/src/test/results/clientpositive/input_part7.q.out ql/src/test/results/clientpositive/input_part7.q.out index 23f3fd7c05..28eeccdb8c 100644 --- ql/src/test/results/clientpositive/input_part7.q.out +++ ql/src/test/results/clientpositive/input_part7.q.out @@ -43,7 +43,7 @@ STAGE PLANS: Statistics: Num rows: 666 Data size: 7074 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string), '2008-04-08' (type: string), _col3 (type: string) - null sort order: aaaa + null sort order: zzzz sort order: ++++ Statistics: Num rows: 666 Data size: 7074 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -69,7 +69,7 @@ STAGE PLANS: Statistics: Num rows: 666 Data size: 7074 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string), '2008-04-08' (type: string), _col3 (type: string) - null sort order: aaaa + null sort order: zzzz sort order: ++++ Statistics: Num rows: 666 Data size: 7074 Basic stats: COMPLETE Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/insert_values_non_partitioned.q.out ql/src/test/results/clientpositive/insert_values_non_partitioned.q.out index 38193510c4..513f7e533b 100644 --- ql/src/test/results/clientpositive/insert_values_non_partitioned.q.out +++ ql/src/test/results/clientpositive/insert_values_non_partitioned.q.out @@ -65,6 +65,6 @@ POSTHOOK: query: select * from acid_ivnp order by ti POSTHOOK: type: QUERY POSTHOOK: Input: default@acid_ivnp #### A masked pattern was here #### -NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL 1 257 65537 4294967297 3.14 3.141592654 109.23 2014-08-25 17:21:30 2014-08-25 true mary had a little lamb ring around the rosie red 3 25 6553 NULL 0.14 1923.141592654 1.23 2014-08-24 17:21:30 2014-08-26 false its fleece was white as snow a pocket full of posies blue +NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL diff --git ql/src/test/results/clientpositive/join18.q.out ql/src/test/results/clientpositive/join18.q.out index 9d82aa012e..98a4f5f346 100644 --- ql/src/test/results/clientpositive/join18.q.out +++ ql/src/test/results/clientpositive/join18.q.out @@ -88,7 +88,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) diff --git ql/src/test/results/clientpositive/join18_multi_distinct.q.out ql/src/test/results/clientpositive/join18_multi_distinct.q.out index a95e9b16c4..1c5ad14037 100644 --- ql/src/test/results/clientpositive/join18_multi_distinct.q.out +++ ql/src/test/results/clientpositive/join18_multi_distinct.q.out @@ -90,7 +90,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) diff --git ql/src/test/results/clientpositive/join45.q.out ql/src/test/results/clientpositive/join45.q.out index cbabf7fa1a..77dbaa2cd8 100644 --- ql/src/test/results/clientpositive/join45.q.out +++ ql/src/test/results/clientpositive/join45.q.out @@ -1365,7 +1365,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) @@ -1470,7 +1470,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) diff --git ql/src/test/results/clientpositive/join46.q.out 
ql/src/test/results/clientpositive/join46.q.out index dbaddcc68b..2f89a5225f 100644 --- ql/src/test/results/clientpositive/join46.q.out +++ ql/src/test/results/clientpositive/join46.q.out @@ -1423,7 +1423,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 1 @@ -1522,7 +1522,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 1 @@ -1619,7 +1619,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 1 @@ -1718,7 +1718,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col1 (type: int) 1 _col1 (type: int) @@ -1876,7 +1876,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 1 diff --git ql/src/test/results/clientpositive/join47.q.out ql/src/test/results/clientpositive/join47.q.out index 6f529d603f..2536f7f4b6 100644 --- ql/src/test/results/clientpositive/join47.q.out +++ ql/src/test/results/clientpositive/join47.q.out @@ -1347,7 +1347,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) @@ -1452,7 +1452,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) diff --git ql/src/test/results/clientpositive/join6.q.out ql/src/test/results/clientpositive/join6.q.out index 055eee2c26..1135cef5e1 100644 --- ql/src/test/results/clientpositive/join6.q.out +++ ql/src/test/results/clientpositive/join6.q.out @@ -83,7 +83,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) diff --git ql/src/test/results/clientpositive/join7.q.out ql/src/test/results/clientpositive/join7.q.out index 0339c90fad..0bc6f8948c 100644 --- ql/src/test/results/clientpositive/join7.q.out +++ ql/src/test/results/clientpositive/join7.q.out @@ -110,7 +110,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 Left Outer Join 0 to 2 keys: 0 _col0 (type: string) diff --git ql/src/test/results/clientpositive/join_filters_overlap.q.out ql/src/test/results/clientpositive/join_filters_overlap.q.out index 6557cac9f4..ffb8cd3b0c 100644 --- ql/src/test/results/clientpositive/join_filters_overlap.q.out +++ ql/src/test/results/clientpositive/join_filters_overlap.q.out @@ -780,7 +780,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 Left Outer Join 1 to 2 Left Outer Join 0 to 3 filter mappings: diff --git ql/src/test/results/clientpositive/limit_pushdown2.q.out ql/src/test/results/clientpositive/limit_pushdown2.q.out index 87be772d40..edfc042820 100644 --- ql/src/test/results/clientpositive/limit_pushdown2.q.out +++ ql/src/test/results/clientpositive/limit_pushdown2.q.out @@ -1143,23 +1143,23 @@ order by key, value limit 20 POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -NULL NULL 261.182 -NULL val_0 1.0 -NULL val_10 11.0 -NULL val_100 101.0 -NULL val_103 104.0 -NULL val_104 105.0 -NULL val_105 106.0 -NULL val_11 12.0 -NULL val_111 112.0 -NULL val_113 114.0 -NULL val_114 115.0 -NULL val_116 117.0 -NULL val_118 
119.0 -NULL val_119 120.0 -NULL val_12 13.0 -NULL val_120 121.0 -NULL val_125 126.0 -NULL val_126 127.0 -NULL val_128 129.0 -NULL val_129 130.0 +0 val_0 1.0 +10 val_10 11.0 +100 val_100 101.0 +103 val_103 104.0 +104 val_104 105.0 +105 val_105 106.0 +11 val_11 12.0 +111 val_111 112.0 +113 val_113 114.0 +114 val_114 115.0 +116 val_116 117.0 +118 val_118 119.0 +119 val_119 120.0 +12 val_12 13.0 +120 val_120 121.0 +125 val_125 126.0 +126 val_126 127.0 +128 val_128 129.0 +129 val_129 130.0 +131 val_131 132.0 diff --git ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out index 4cfb1d9954..04518b39fd 100644 --- ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out +++ ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out @@ -1285,11 +1285,11 @@ STAGE PLANS: selectExpressions: StringGroupColConcatStringScalar(col 1:string, val updated) -> 6:string Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4] + keyColumns: 4:struct native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [5] - valueColumnNums: [0, 6, 2] + partitionColumns: 5:int + valueColumns: 0:string, 6:string, 2:string Execution mode: vectorized, llap LLAP IO: may be used (ACID table) Map Vectorization: @@ -1314,7 +1314,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -1492,11 +1492,11 @@ STAGE PLANS: projectedOutputColumnNums: [4, 2, 3] Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4] + keyColumns: 4:struct native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [5] - valueColumnNums: [2, 3] + partitionColumns: 5:int + valueColumns: 2:string, 3:string Execution mode: vectorized, llap LLAP IO: may be used (ACID table) Map Vectorization: @@ -1521,7 +1521,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -1653,11 +1653,10 @@ STAGE PLANS: projectedOutputColumnNums: [] Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2, 3] + keyColumns: 0:string, 1:string, 2:string, 3:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 1, 2, 3] - valueColumnNums: [] + partitionColumns: 0:string, 1:string, 2:string, 3:string Execution mode: vectorized, llap LLAP IO: may be used (ACID table) Map Vectorization: @@ -1683,11 +1682,11 @@ STAGE PLANS: 
vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ds:string, 3:hr:string, 4:ROW__ID:struct] Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 3, 0, 1] + keyColumns: 2:string, 3:string, 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2, 3, 0, 1] - valueColumnNums: [4] + partitionColumns: 2:string, 3:string, 0:string, 1:string + valueColumns: 4:struct Execution mode: vectorized, llap LLAP IO: may be used (ACID table) Map Vectorization: @@ -1732,11 +1731,10 @@ STAGE PLANS: projectedOutputColumnNums: [] Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2, 3] + keyColumns: 0:string, 1:string, 2:string, 3:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 1, 2, 3] - valueColumnNums: [] + partitionColumns: 0:string, 1:string, 2:string, 3:string Select Vectorization: className: VectorSelectOperator native: true @@ -1766,12 +1764,15 @@ STAGE PLANS: className: VectorAppMasterEventOperator native: true Reducer 3 + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 4 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -1794,7 +1795,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -2077,11 +2078,11 @@ STAGE PLANS: selectExpressions: StringGroupColConcatStringScalar(col 1:string, val updated) -> 6:string Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4] + keyColumns: 4:struct native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [5] - valueColumnNums: [0, 6, 2] + partitionColumns: 5:int + valueColumns: 0:string, 6:string, 2:string Execution mode: vectorized, llap LLAP IO: may be used (ACID table) Map Vectorization: @@ -2106,7 +2107,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -2284,11 +2285,11 @@ STAGE PLANS: projectedOutputColumnNums: [4, 2, 3] Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4] + keyColumns: 4:struct native: true nativeConditionsMet: 
hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [5] - valueColumnNums: [2, 3] + partitionColumns: 5:int + valueColumns: 2:string, 3:string Execution mode: vectorized, llap LLAP IO: may be used (ACID table) Map Vectorization: @@ -2313,7 +2314,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -2446,11 +2447,10 @@ STAGE PLANS: projectedOutputColumnNums: [] Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2, 3] + keyColumns: 0:string, 1:string, 2:string, 3:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 1, 2, 3] - valueColumnNums: [] + partitionColumns: 0:string, 1:string, 2:string, 3:string Execution mode: vectorized, llap LLAP IO: may be used (ACID table) Map Vectorization: @@ -2476,11 +2476,11 @@ STAGE PLANS: vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ds:string, 3:hr:string, 4:ROW__ID:struct] Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 3, 0, 1] + keyColumns: 2:string, 3:string, 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2, 3, 0, 1] - valueColumnNums: [4] + partitionColumns: 2:string, 3:string, 0:string, 1:string + valueColumns: 4:struct Execution mode: vectorized, llap LLAP IO: may be used (ACID table) Map Vectorization: @@ -2525,11 +2525,10 @@ STAGE PLANS: projectedOutputColumnNums: [] Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2, 3] + keyColumns: 0:string, 1:string, 2:string, 3:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 1, 2, 3] - valueColumnNums: [] + partitionColumns: 0:string, 1:string, 2:string, 3:string Select Vectorization: className: VectorSelectOperator native: true @@ -2559,12 +2558,15 @@ STAGE PLANS: className: VectorAppMasterEventOperator native: true Reducer 3 + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 4 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -2587,7 +2589,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: 
hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false diff --git ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out index 00be86c903..9178652a99 100644 --- ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out +++ ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out @@ -97,7 +97,6 @@ POSTHOOK: query: select distinct si, si%4 from over10k_n2 order by si POSTHOOK: type: QUERY POSTHOOK: Input: default@over10k_n2 #### A masked pattern was here #### -NULL NULL 256 0 257 1 258 2 @@ -350,6 +349,7 @@ NULL NULL 509 1 510 2 511 3 +NULL NULL PREHOOK: query: insert into over10k_orc_bucketed select * from over10k_n2 PREHOOK: type: QUERY PREHOOK: Input: default@over10k_n2 diff --git ql/src/test/results/clientpositive/llap/auto_join_filters.q.out ql/src/test/results/clientpositive/llap/auto_join_filters.q.out index a63979280e..7290fac7ea 100644 --- ql/src/test/results/clientpositive/llap/auto_join_filters.q.out +++ ql/src/test/results/clientpositive/llap/auto_join_filters.q.out @@ -44,6 +44,113 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1_n5 #### A masked pattern was here #### 3080335 +Warning: Shuffle Join MERGEJOIN[14][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +4939870 +Warning: Shuffle Join MERGEJOIN[14][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product +PREHOOK: query: EXPLAIN SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 4 (CUSTOM_SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), 
_col1 (type: int) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map 4 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col1 (type: int) + Execution mode: vectorized, llap + LLAP IO: no inputs + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + filter predicates: + 0 {(VALUE._col0 > 40)} {(VALUE._col1 > 50)} {(VALUE._col0 = VALUE._col1)} + 1 {(VALUE._col0 > 40)} {(VALUE._col1 > 50)} {(VALUE._col0 = VALUE._col1)} + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 17 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: hash(_col0,_col1,_col2,_col3) (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 17 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col0) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Reducer 3 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + Warning: Shuffle Join MERGEJOIN[14][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value PREHOOK: type: QUERY @@ -162,6 +269,142 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1_n5 #### A masked pattern was here #### 3080335 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +4939870 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = 
a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +4939870 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +4939870 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value and a.key=b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value and a.key=b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +4939870 +PREHOOK: query: EXPLAIN SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: int) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map 4 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int) 
+ outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int) + Execution mode: vectorized, llap + LLAP IO: no inputs + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + filter predicates: + 0 {(KEY.reducesinkkey0 > 40)} {(VALUE._col0 > 50)} {(KEY.reducesinkkey0 = VALUE._col0)} + 1 {(VALUE._col0 > 40)} {(KEY.reducesinkkey0 > 50)} {(VALUE._col0 = KEY.reducesinkkey0)} + keys: + 0 _col0 (type: int) + 1 _col1 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: hash(_col0,_col1,_col2,_col3) (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col0) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Reducer 3 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1_n5 @@ -340,6 +583,16 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1_n5 #### A masked pattern was here #### 4939870 +Warning: Shuffle Join MERGEJOIN[14][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +4939870 PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1_n5 @@ -448,6 +701,142 @@ 
POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1_n5 #### A masked pattern was here #### 3080335 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +4939870 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +4939870 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +4939870 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value and a.key=b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value and a.key=b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +4939870 +PREHOOK: query: EXPLAIN SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 
1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: int) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map 4 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int) + Execution mode: vectorized, llap + LLAP IO: no inputs + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + filter predicates: + 0 {(KEY.reducesinkkey0 > 40)} {(VALUE._col0 > 50)} {(KEY.reducesinkkey0 = VALUE._col0)} + 1 {(VALUE._col0 > 40)} {(KEY.reducesinkkey0 > 50)} {(VALUE._col0 = KEY.reducesinkkey0)} + keys: + 0 _col0 (type: int) + 1 _col1 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: hash(_col0,_col1,_col2,_col3) (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col0) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Reducer 3 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1_n5 diff --git ql/src/test/results/clientpositive/llap/auto_join_nulls.q.out ql/src/test/results/clientpositive/llap/auto_join_nulls.q.out index 194fc5def3..a1604282ac 100644 --- 
ql/src/test/results/clientpositive/llap/auto_join_nulls.q.out +++ ql/src/test/results/clientpositive/llap/auto_join_nulls.q.out @@ -152,6 +152,139 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1_n2 #### A masked pattern was here #### 3079923 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n2 +#### A masked pattern was here #### +4543526 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.key = b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.key = b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n2 +#### A masked pattern was here #### +4543526 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.value = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.value = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n2 +#### A masked pattern was here #### +4543526 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.value = b.value and a.key=b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.value = b.value and a.key=b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n2 +#### A masked pattern was here #### +4543526 +PREHOOK: query: EXPLAIN SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.key = b.value +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.key = b.value +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: int) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map 4 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Select 
Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int) + Execution mode: vectorized, llap + LLAP IO: no inputs + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col0 (type: int) + 1 _col1 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: hash(_col0,_col1,_col2,_col3) (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col0) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Reducer 3 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.key = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1_n2 diff --git ql/src/test/results/clientpositive/llap/bucketmapjoin7.q.out ql/src/test/results/clientpositive/llap/bucketmapjoin7.q.out index c3faba8fe6..01668fa9a5 100644 --- ql/src/test/results/clientpositive/llap/bucketmapjoin7.q.out +++ ql/src/test/results/clientpositive/llap/bucketmapjoin7.q.out @@ -249,7 +249,7 @@ STAGE PLANS: Statistics: Num rows: 72 Data size: 29216 Basic stats: PARTIAL Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 72 Data size: 29216 Basic stats: PARTIAL Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/llap/cbo_limit.q.out ql/src/test/results/clientpositive/llap/cbo_limit.q.out index c5825788e7..0d5c8f0e36 100644 --- ql/src/test/results/clientpositive/llap/cbo_limit.q.out +++ ql/src/test/results/clientpositive/llap/cbo_limit.q.out @@ -8,7 +8,7 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@cbo_t1 POSTHOOK: Input: default@cbo_t1@dt=2014 #### A masked pattern was here #### -NULL NULL NULL +1 4 12 PREHOOK: query: select x, y, count(*) from (select key, (c_int+c_float+1+2) as x, sum(c_int) as y from cbo_t1 group by c_float, cbo_t1.c_int, key) R group by y, x order by x,y limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@cbo_t1 @@ -19,7 +19,7 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@cbo_t1 POSTHOOK: 
Input: default@cbo_t1@dt=2014 #### A masked pattern was here #### -NULL NULL 1 +5.0 2 2 PREHOOK: query: select key from(select key from (select key from cbo_t1 limit 5)cbo_t2 limit 5)cbo_t3 limit 5 PREHOOK: type: QUERY PREHOOK: Input: default@cbo_t1 @@ -45,8 +45,8 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@cbo_t1 POSTHOOK: Input: default@cbo_t1@dt=2014 #### A masked pattern was here #### -NULL NULL -NULL NULL +1 1 +1 1 1 1 1 1 1 1 diff --git ql/src/test/results/clientpositive/llap/cbo_rp_limit.q.out ql/src/test/results/clientpositive/llap/cbo_rp_limit.q.out index c5825788e7..0d5c8f0e36 100644 --- ql/src/test/results/clientpositive/llap/cbo_rp_limit.q.out +++ ql/src/test/results/clientpositive/llap/cbo_rp_limit.q.out @@ -8,7 +8,7 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@cbo_t1 POSTHOOK: Input: default@cbo_t1@dt=2014 #### A masked pattern was here #### -NULL NULL NULL +1 4 12 PREHOOK: query: select x, y, count(*) from (select key, (c_int+c_float+1+2) as x, sum(c_int) as y from cbo_t1 group by c_float, cbo_t1.c_int, key) R group by y, x order by x,y limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@cbo_t1 @@ -19,7 +19,7 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@cbo_t1 POSTHOOK: Input: default@cbo_t1@dt=2014 #### A masked pattern was here #### -NULL NULL 1 +5.0 2 2 PREHOOK: query: select key from(select key from (select key from cbo_t1 limit 5)cbo_t2 limit 5)cbo_t3 limit 5 PREHOOK: type: QUERY PREHOOK: Input: default@cbo_t1 @@ -45,8 +45,8 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@cbo_t1 POSTHOOK: Input: default@cbo_t1@dt=2014 #### A masked pattern was here #### -NULL NULL -NULL NULL +1 1 +1 1 1 1 1 1 1 1 diff --git ql/src/test/results/clientpositive/llap/column_names_with_leading_and_trailing_spaces.q.out ql/src/test/results/clientpositive/llap/column_names_with_leading_and_trailing_spaces.q.out index e83a370391..a9206f7070 100644 --- ql/src/test/results/clientpositive/llap/column_names_with_leading_and_trailing_spaces.q.out +++ ql/src/test/results/clientpositive/llap/column_names_with_leading_and_trailing_spaces.q.out @@ -119,5 +119,5 @@ POSTHOOK: query: select * from space order by ` left` POSTHOOK: type: QUERY POSTHOOK: Input: default@space #### A masked pattern was here #### -NULL 2 NULL 1 2 3 +NULL 2 NULL diff --git ql/src/test/results/clientpositive/llap/convert_decimal64_to_decimal.q.out ql/src/test/results/clientpositive/llap/convert_decimal64_to_decimal.q.out index fddd2cbbfc..3d0053c152 100644 --- ql/src/test/results/clientpositive/llap/convert_decimal64_to_decimal.q.out +++ ql/src/test/results/clientpositive/llap/convert_decimal64_to_decimal.q.out @@ -207,10 +207,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap @@ -260,10 +259,10 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: decimal(9,2)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [19] + keyColumns: 19:decimal(9,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, 
hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [14] + valueColumns: 14:smallint Statistics: Num rows: 950 Data size: 104800 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: smallint) Execution mode: vectorized, llap @@ -417,10 +416,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap @@ -471,10 +469,10 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: decimal(9,2)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [19] + keyColumns: 19:decimal(9,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [14] + valueColumns: 14:smallint Statistics: Num rows: 950 Data size: 104800 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: smallint) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/correlationoptimizer1.q.out ql/src/test/results/clientpositive/llap/correlationoptimizer1.q.out index 0edeef947e..63163234a1 100644 --- ql/src/test/results/clientpositive/llap/correlationoptimizer1.q.out +++ ql/src/test/results/clientpositive/llap/correlationoptimizer1.q.out @@ -2213,7 +2213,7 @@ STAGE PLANS: Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) @@ -2355,7 +2355,291 @@ STAGE PLANS: Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 525 Data size: 3612 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: count() + keys: _col0 (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 17 Data size: 308 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 17 Data size: 308 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: bigint) + Reducer 3 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 17 Data size: 308 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: hash(_col0) (type: int), hash(_col1) (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 17 Data size: 308 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: sum(_col0), sum(_col1) + mode: 
hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: bigint), _col1 (type: bigint) + Reducer 4 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), sum(VALUE._col1) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT x.key AS key, count(1) AS cnt + FROM src1 x FULL OUTER JOIN src y ON (x.key = y.key) + GROUP BY x.key) tmp +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Input: default@src1 +#### A masked pattern was here #### +POSTHOOK: query: SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT x.key AS key, count(1) AS cnt + FROM src1 x FULL OUTER JOIN src y ON (x.key = y.key) + GROUP BY x.key) tmp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Input: default@src1 +#### A masked pattern was here #### +652447 510 +PREHOOK: query: EXPLAIN +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT x.key AS key, count(1) AS cnt + FROM src1 x FULL OUTER JOIN src y ON (x.key = y.key) + GROUP BY x.key) tmp +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT x.key AS key, count(1) AS cnt + FROM src1 x FULL OUTER JOIN src y ON (x.key = y.key) + GROUP BY x.key) tmp +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: x + Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized, llap + LLAP IO: no inputs + Map 5 + Map Operator Tree: + TableScan + alias: y + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Execution 
mode: vectorized, llap + LLAP IO: no inputs + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 525 Data size: 3612 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: count() + keys: _col0 (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 17 Data size: 308 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 17 Data size: 308 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: bigint) + Reducer 3 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 17 Data size: 308 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: hash(_col0) (type: int), hash(_col1) (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 17 Data size: 308 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: sum(_col0), sum(_col1) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: bigint), _col1 (type: bigint) + Reducer 4 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), sum(VALUE._col1) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT x.key AS key, count(1) AS cnt + FROM src1 x FULL OUTER JOIN src y ON (x.key = y.key) + GROUP BY x.key) tmp +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Input: default@src1 +#### A masked pattern was here #### +POSTHOOK: query: SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT x.key AS key, count(1) AS cnt + FROM src1 x FULL OUTER JOIN src y ON (x.key = y.key) + GROUP BY x.key) tmp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Input: default@src1 +#### A masked pattern was here #### +652447 510 +PREHOOK: query: EXPLAIN +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT x.key AS key, count(1) AS cnt + FROM src1 x FULL OUTER JOIN src y ON (x.key = y.key) + GROUP BY x.key) tmp +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT x.key AS key, count(1) AS cnt + FROM src1 x FULL OUTER JOIN src y ON (x.key = y.key) + GROUP BY x.key) tmp +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: 
Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: x + Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized, llap + LLAP IO: no inputs + Map 5 + Map Operator Tree: + TableScan + alias: y + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized, llap + LLAP IO: no inputs + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) diff --git ql/src/test/results/clientpositive/llap/correlationoptimizer2.q.out ql/src/test/results/clientpositive/llap/correlationoptimizer2.q.out index b075ecf2f5..801948c72d 100644 --- ql/src/test/results/clientpositive/llap/correlationoptimizer2.q.out +++ ql/src/test/results/clientpositive/llap/correlationoptimizer2.q.out @@ -983,7 +983,7 @@ STAGE PLANS: Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) @@ -1153,7 +1153,7 @@ STAGE PLANS: Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) @@ -1229,6 +1229,910 @@ POSTHOOK: Input: default@src1 #### A masked pattern was here #### 12744278 500 652447 25 PREHOOK: query: EXPLAIN +SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.cnt AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key)) tmp +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.cnt AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key)) tmp +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (ONE_TO_ONE_EDGE), Reducer 6 (ONE_TO_ONE_EDGE) + Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE) + Reducer 6 <- Map 5 
(SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: x + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: key, value + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: count(value) + keys: key (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 250 Data size: 23750 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 250 Data size: 23750 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map 5 + Map Operator Tree: + TableScan + alias: y + Statistics: Num rows: 25 Data size: 4375 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: key, value + Statistics: Num rows: 25 Data size: 4375 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: count(value) + keys: key (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 12 Data size: 1128 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 12 Data size: 1128 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: no inputs + Reducer 2 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 250 Data size: 23750 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 250 Data size: 23750 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: bigint) + Reducer 3 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 262 Data size: 2457 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: hash(_col0) (type: int), hash(_col1) (type: int), hash(_col2) (type: int), hash(_col3) (type: int) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 262 Data size: 2457 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: sum(_col0), sum(_col1), sum(_col2), sum(_col3) + mode: hash + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: bigint), _col3 (type: bigint) + Reducer 4 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), 
sum(VALUE._col1), sum(VALUE._col2), sum(VALUE._col3) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Reducer 6 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 12 Data size: 1128 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 12 Data size: 1128 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: bigint) + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.cnt AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key)) tmp +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Input: default@src1 +#### A masked pattern was here #### +POSTHOOK: query: SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.cnt AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key)) tmp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Input: default@src1 +#### A masked pattern was here #### +12744278 500 652447 25 +PREHOOK: query: EXPLAIN +SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.cnt AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key)) tmp +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.cnt AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key)) tmp +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (ONE_TO_ONE_EDGE), Reducer 6 (ONE_TO_ONE_EDGE) + Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE) + Reducer 6 <- Map 5 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: x + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key 
(type: string), value (type: string) + outputColumnNames: key, value + Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: count(value) + keys: key (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 250 Data size: 23750 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 250 Data size: 23750 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map 5 + Map Operator Tree: + TableScan + alias: y + Statistics: Num rows: 25 Data size: 4375 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: key, value + Statistics: Num rows: 25 Data size: 4375 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: count(value) + keys: key (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 12 Data size: 1128 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 12 Data size: 1128 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: no inputs + Reducer 2 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 250 Data size: 23750 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 250 Data size: 23750 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: bigint) + Reducer 3 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 262 Data size: 2457 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: hash(_col0) (type: int), hash(_col1) (type: int), hash(_col2) (type: int), hash(_col3) (type: int) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 262 Data size: 2457 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: sum(_col0), sum(_col1), sum(_col2), sum(_col3) + mode: hash + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: bigint), _col3 (type: bigint) + Reducer 4 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), sum(VALUE._col1), sum(VALUE._col2), sum(VALUE._col3) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + 
Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Reducer 6 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 12 Data size: 1128 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 12 Data size: 1128 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: bigint) + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.cnt AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key)) tmp +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Input: default@src1 +#### A masked pattern was here #### +POSTHOOK: query: SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.cnt AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key)) tmp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Input: default@src1 +#### A masked pattern was here #### +12744278 500 652447 25 +PREHOOK: query: EXPLAIN +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT a.key AS key, count(1) AS cnt + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key) + GROUP BY a.key) tmp +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT a.key AS key, count(1) AS cnt + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key) + GROUP BY a.key) tmp +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (ONE_TO_ONE_EDGE), Reducer 7 (ONE_TO_ONE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) + Reducer 5 <- Reducer 4 (CUSTOM_SIMPLE_EDGE) + Reducer 7 <- Map 6 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: x + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: key + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + keys: key (type: string) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column stats: 
COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized, llap + LLAP IO: no inputs + Map 6 + Map Operator Tree: + TableScan + alias: y + Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: key + Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + keys: key (type: string) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 12 Data size: 1032 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 12 Data size: 1032 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized, llap + LLAP IO: no inputs + Reducer 2 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column stats: COMPLETE + Reducer 3 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 262 Data size: 1131 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: count() + keys: _col0 (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 13 Data size: 278 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 13 Data size: 278 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: bigint) + Reducer 4 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 13 Data size: 278 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: hash(_col0) (type: int), hash(_col1) (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 13 Data size: 278 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: sum(_col0), sum(_col1) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: bigint), _col1 (type: bigint) + Reducer 5 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), sum(VALUE._col1) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 16 Basic 
stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Reducer 7 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 12 Data size: 1032 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 12 Data size: 1032 Basic stats: COMPLETE Column stats: COMPLETE + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT a.key AS key, count(1) AS cnt + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key) + GROUP BY a.key) tmp +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Input: default@src1 +#### A masked pattern was here #### +POSTHOOK: query: SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT a.key AS key, count(1) AS cnt + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key) + GROUP BY a.key) tmp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Input: default@src1 +#### A masked pattern was here #### +12744278 310 +PREHOOK: query: EXPLAIN +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT a.key AS key, count(1) AS cnt + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key) + GROUP BY a.key) tmp +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT a.key AS key, count(1) AS cnt + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key) + GROUP BY a.key) tmp +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (ONE_TO_ONE_EDGE), Reducer 7 (ONE_TO_ONE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) + Reducer 5 <- Reducer 4 (CUSTOM_SIMPLE_EDGE) + Reducer 7 <- Map 6 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: x + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: key + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + keys: key (type: string) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column 
stats: COMPLETE + Execution mode: vectorized, llap + LLAP IO: no inputs + Map 6 + Map Operator Tree: + TableScan + alias: y + Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: key + Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + keys: key (type: string) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 12 Data size: 1032 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 12 Data size: 1032 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized, llap + LLAP IO: no inputs + Reducer 2 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column stats: COMPLETE + Reducer 3 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 262 Data size: 1131 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: count() + keys: _col0 (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 13 Data size: 278 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 13 Data size: 278 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: bigint) + Reducer 4 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 13 Data size: 278 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: hash(_col0) (type: int), hash(_col1) (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 13 Data size: 278 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: sum(_col0), sum(_col1) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: bigint), _col1 (type: bigint) + Reducer 5 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), sum(VALUE._col1) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Reducer 7 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 12 Data size: 1032 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 12 Data size: 1032 Basic stats: COMPLETE Column stats: COMPLETE + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT a.key AS key, count(1) AS cnt + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key) + GROUP BY a.key) tmp +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Input: default@src1 +#### A masked pattern was here #### +POSTHOOK: query: SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT a.key AS key, count(1) AS cnt + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key) + GROUP BY a.key) tmp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Input: default@src1 +#### A masked pattern was here #### +12744278 310 +PREHOOK: query: EXPLAIN +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT a.key AS key, count(1) AS cnt + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key) + GROUP BY a.key) tmp +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT a.key AS key, count(1) AS cnt + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key) + GROUP BY a.key) tmp +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (ONE_TO_ONE_EDGE), Reducer 7 (ONE_TO_ONE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) + Reducer 5 <- Reducer 4 (CUSTOM_SIMPLE_EDGE) + Reducer 7 <- Map 6 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: x + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: key + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + keys: key (type: string) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized, llap + LLAP IO: no inputs + Map 6 + Map Operator Tree: + TableScan + alias: y + Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column 
stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: key + Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + keys: key (type: string) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 12 Data size: 1032 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 12 Data size: 1032 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized, llap + LLAP IO: no inputs + Reducer 2 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column stats: COMPLETE + Reducer 3 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 262 Data size: 1131 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: count() + keys: _col0 (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 13 Data size: 278 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 13 Data size: 278 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: bigint) + Reducer 4 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 13 Data size: 278 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: hash(_col0) (type: int), hash(_col1) (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 13 Data size: 278 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: sum(_col0), sum(_col1) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: bigint), _col1 (type: bigint) + Reducer 5 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), sum(VALUE._col1) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Reducer 7 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: 
_col0 + Statistics: Num rows: 12 Data size: 1032 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 12 Data size: 1032 Basic stats: COMPLETE Column stats: COMPLETE + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT a.key AS key, count(1) AS cnt + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key) + GROUP BY a.key) tmp +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Input: default@src1 +#### A masked pattern was here #### +POSTHOOK: query: SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT a.key AS key, count(1) AS cnt + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key) + GROUP BY a.key) tmp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Input: default@src1 +#### A masked pattern was here #### +12744278 310 +PREHOOK: query: EXPLAIN SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT a.key AS key, count(1) AS cnt FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a @@ -1320,7 +2224,7 @@ STAGE PLANS: Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) @@ -1352,42 +2256,207 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Statistics: Num rows: 13 Data size: 278 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator - aggregations: sum(_col0), sum(_col1) + aggregations: sum(_col0), sum(_col1) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: bigint), _col1 (type: bigint) + Reducer 5 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), sum(VALUE._col1) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Reducer 7 + Execution mode: vectorized, llap + Reduce Operator Tree: + Group By Operator + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 12 Data size: 1032 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 12 Data size: 1032 Basic stats: COMPLETE Column stats: COMPLETE + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT a.key AS key, count(1) AS cnt + FROM (SELECT x.key 
as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key) + GROUP BY a.key) tmp +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Input: default@src1 +#### A masked pattern was here #### +POSTHOOK: query: SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT a.key AS key, count(1) AS cnt + FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a + FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b + ON (a.key = b.key) + GROUP BY a.key) tmp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Input: default@src1 +#### A masked pattern was here #### +12744278 310 +PREHOOK: query: EXPLAIN +SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.val AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key AS key, x.value AS val FROM src1 x JOIN src y ON (x.key = y.key)) a + JOIN (SELECT z.key AS key, count(z.value) AS cnt FROM src1 z group by z.key) b + ON (a.key = b.key)) tmp +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.val AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key AS key, x.value AS val FROM src1 x JOIN src y ON (x.key = y.key)) a + JOIN (SELECT z.key AS key, count(z.value) AS cnt FROM src1 z group by z.key) b + ON (a.key = b.key)) tmp +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE), Reducer 4 (ONE_TO_ONE_EDGE) + Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE) + Reducer 4 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: x + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 25 Data size: 4375 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 25 Data size: 4375 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 25 Data size: 4375 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 25 Data size: 4375 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: string) + Group By Operator + aggregations: count(value) + keys: key (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 12 Data size: 1128 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 12 Data size: 1128 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map 5 + Map Operator Tree: + TableScan + alias: y + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 500 Data 
size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized, llap + LLAP IO: no inputs + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Inner Join 0 to 1 + Inner Join 0 to 2 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + 2 _col0 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 29 Data size: 7801 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: hash(_col0) (type: int), hash(_col1) (type: int), hash(_col2) (type: int), hash(_col3) (type: int) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 29 Data size: 7801 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: sum(_col0), sum(_col1), sum(_col2), sum(_col3) mode: hash - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE - value expressions: _col0 (type: bigint), _col1 (type: bigint) - Reducer 5 + Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: bigint), _col3 (type: bigint) + Reducer 3 Execution mode: vectorized, llap Reduce Operator Tree: Group By Operator - aggregations: sum(VALUE._col0), sum(VALUE._col1) + aggregations: sum(VALUE._col0), sum(VALUE._col1), sum(VALUE._col2), sum(VALUE._col3) mode: mergepartial - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Reducer 7 + Reducer 4 Execution mode: vectorized, llap Reduce Operator Tree: Group By Operator + aggregations: count(VALUE._col0) keys: KEY._col0 (type: string) mode: mergepartial - outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1032 Basic stats: COMPLETE Column stats: COMPLETE + outputColumnNames: _col0, _col1 + Statistics: Num rows: 12 Data size: 1128 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 12 Data size: 1032 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 12 Data size: 1128 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: bigint) Stage: Stage-0 Fetch 
Operator @@ -1395,42 +2464,38 @@ STAGE PLANS: Processor Tree: ListSink -PREHOOK: query: SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) -FROM (SELECT a.key AS key, count(1) AS cnt - FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a - FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b - ON (a.key = b.key) - GROUP BY a.key) tmp +PREHOOK: query: SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.val AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key AS key, x.value AS val FROM src1 x JOIN src y ON (x.key = y.key)) a + JOIN (SELECT z.key AS key, count(z.value) AS cnt FROM src1 z group by z.key) b + ON (a.key = b.key)) tmp PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Input: default@src1 #### A masked pattern was here #### -POSTHOOK: query: SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) -FROM (SELECT a.key AS key, count(1) AS cnt - FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a - FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b - ON (a.key = b.key) - GROUP BY a.key) tmp +POSTHOOK: query: SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.val AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key AS key, x.value AS val FROM src1 x JOIN src y ON (x.key = y.key)) a + JOIN (SELECT z.key AS key, count(z.value) AS cnt FROM src1 z group by z.key) b + ON (a.key = b.key)) tmp POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 #### A masked pattern was here #### -12744278 310 +1711763 3531902962 1711763 37 PREHOOK: query: EXPLAIN -SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) -FROM (SELECT a.key AS key, count(1) AS cnt - FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a - FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b - ON (a.key = b.key) - GROUP BY a.key) tmp +SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.val AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key AS key, x.value AS val FROM src1 x JOIN src y ON (x.key = y.key)) a + JOIN (SELECT z.key AS key, count(z.value) AS cnt FROM src1 z group by z.key) b + ON (a.key = b.key)) tmp PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN -SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) -FROM (SELECT a.key AS key, count(1) AS cnt - FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a - FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b - ON (a.key = b.key) - GROUP BY a.key) tmp +SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.val AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key AS key, x.value AS val FROM src1 x JOIN src y ON (x.key = y.key)) a + JOIN (SELECT z.key AS key, count(z.value) AS cnt FROM src1 z group by z.key) b + ON (a.key = b.key)) tmp POSTHOOK: type: QUERY STAGE DEPENDENCIES: Stage-1 is a root stage @@ -1441,141 +2506,136 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Reducer 2 <- Map 1 (SIMPLE_EDGE) - Reducer 3 <- Reducer 2 (ONE_TO_ONE_EDGE), Reducer 7 (ONE_TO_ONE_EDGE) - Reducer 4 <- Reducer 3 (SIMPLE_EDGE) - Reducer 5 <- Reducer 4 (CUSTOM_SIMPLE_EDGE) - Reducer 7 <- Map 6 (SIMPLE_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (ONE_TO_ONE_EDGE) + Reducer 3 <- Map 6 
(SIMPLE_EDGE), Reducer 2 (ONE_TO_ONE_EDGE) + Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE) + Reducer 5 <- Map 1 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: Map 1 Map Operator Tree: TableScan alias: x - Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE - Select Operator - expressions: key (type: string) - outputColumnNames: key - Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 25 Data size: 4375 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 25 Data size: 4375 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 25 Data size: 4375 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 25 Data size: 4375 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: string) Group By Operator + aggregations: count(value) keys: key (type: string) mode: hash - outputColumnNames: _col0 - Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column stats: COMPLETE + outputColumnNames: _col0, _col1 + Statistics: Num rows: 12 Data size: 1128 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 12 Data size: 1128 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: bigint) Execution mode: vectorized, llap LLAP IO: no inputs Map 6 Map Operator Tree: TableScan alias: y - Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE - Select Operator - expressions: key (type: string) - outputColumnNames: key - Statistics: Num rows: 25 Data size: 2150 Basic stats: COMPLETE Column stats: COMPLETE - Group By Operator - keys: key (type: string) - mode: hash + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1032 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 12 Data size: 1032 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: no inputs Reducer 2 - Execution mode: vectorized, llap + Execution mode: llap Reduce Operator Tree: - Group By Operator - keys: KEY._col0 (type: string) - mode: mergepartial - outputColumnNames: _col0 - Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column stats: COMPLETE + Merge Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col0 
(type: string) + 1 _col0 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 18 Data size: 4842 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 18 Data size: 4842 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: bigint) Reducer 3 Execution mode: llap Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 + Inner Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) - outputColumnNames: _col0 - Statistics: Num rows: 262 Data size: 1131 Basic stats: COMPLETE Column stats: COMPLETE - Group By Operator - aggregations: count() - keys: _col0 (type: string) - mode: hash - outputColumnNames: _col0, _col1 - Statistics: Num rows: 13 Data size: 278 Basic stats: COMPLETE Column stats: COMPLETE - Reduce Output Operator - key expressions: _col0 (type: string) - sort order: + - Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 13 Data size: 278 Basic stats: COMPLETE Column stats: COMPLETE - value expressions: _col1 (type: bigint) - Reducer 4 - Execution mode: vectorized, llap - Reduce Operator Tree: - Group By Operator - aggregations: count(VALUE._col0) - keys: KEY._col0 (type: string) - mode: mergepartial - outputColumnNames: _col0, _col1 - Statistics: Num rows: 13 Data size: 278 Basic stats: COMPLETE Column stats: COMPLETE + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 28 Data size: 7532 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - expressions: hash(_col0) (type: int), hash(_col1) (type: int) - outputColumnNames: _col0, _col1 - Statistics: Num rows: 13 Data size: 278 Basic stats: COMPLETE Column stats: COMPLETE + expressions: hash(_col0) (type: int), hash(_col1) (type: int), hash(_col2) (type: int), hash(_col3) (type: int) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 28 Data size: 7532 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator - aggregations: sum(_col0), sum(_col1) + aggregations: sum(_col0), sum(_col1), sum(_col2), sum(_col3) mode: hash - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE - value expressions: _col0 (type: bigint), _col1 (type: bigint) - Reducer 5 + Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: bigint), _col3 (type: bigint) + Reducer 4 Execution mode: vectorized, llap Reduce Operator Tree: Group By Operator - aggregations: sum(VALUE._col0), sum(VALUE._col1) + aggregations: sum(VALUE._col0), sum(VALUE._col1), sum(VALUE._col2), sum(VALUE._col3) mode: mergepartial - outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false - Statistics: Num rows: 1 
Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Reducer 7 + Reducer 5 Execution mode: vectorized, llap Reduce Operator Tree: Group By Operator + aggregations: count(VALUE._col0) keys: KEY._col0 (type: string) mode: mergepartial - outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1032 Basic stats: COMPLETE Column stats: COMPLETE + outputColumnNames: _col0, _col1 + Statistics: Num rows: 12 Data size: 1128 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 12 Data size: 1032 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 12 Data size: 1128 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: bigint) Stage: Stage-0 Fetch Operator @@ -1583,27 +2643,25 @@ STAGE PLANS: Processor Tree: ListSink -PREHOOK: query: SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) -FROM (SELECT a.key AS key, count(1) AS cnt - FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a - FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b - ON (a.key = b.key) - GROUP BY a.key) tmp +PREHOOK: query: SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.val AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key AS key, x.value AS val FROM src1 x JOIN src y ON (x.key = y.key)) a + JOIN (SELECT z.key AS key, count(z.value) AS cnt FROM src1 z group by z.key) b + ON (a.key = b.key)) tmp PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Input: default@src1 #### A masked pattern was here #### -POSTHOOK: query: SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) -FROM (SELECT a.key AS key, count(1) AS cnt - FROM (SELECT x.key as key, count(x.value) AS cnt FROM src x group by x.key) a - FULL OUTER JOIN (SELECT y.key as key, count(y.value) AS cnt FROM src1 y group by y.key) b - ON (a.key = b.key) - GROUP BY a.key) tmp +POSTHOOK: query: SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) +FROM (SELECT a.key AS key1, a.val AS cnt1, b.key AS key2, b.cnt AS cnt2 + FROM (SELECT x.key AS key, x.value AS val FROM src1 x JOIN src y ON (x.key = y.key)) a + JOIN (SELECT z.key AS key, count(z.value) AS cnt FROM src1 z group by z.key) b + ON (a.key = b.key)) tmp POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 #### A masked pattern was here #### -12744278 310 +1711763 3531902962 1711763 37 PREHOOK: query: EXPLAIN SELECT SUM(HASH(key1)), SUM(HASH(cnt1)), SUM(HASH(key2)), SUM(HASH(cnt2)) FROM (SELECT a.key AS key1, a.val AS cnt1, b.key AS key2, b.cnt AS cnt2 @@ -1790,9 +2848,10 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE), Reducer 4 (ONE_TO_ONE_EDGE) - Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE) - Reducer 4 <- Map 1 (SIMPLE_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (ONE_TO_ONE_EDGE) + Reducer 3 <- Map 6 (SIMPLE_EDGE), Reducer 2 (ONE_TO_ONE_EDGE) + Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE) + Reducer 5 <- Map 1 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: Map 1 @@ -1828,7 +2887,7 @@ STAGE 
PLANS: value expressions: _col1 (type: bigint) Execution mode: vectorized, llap LLAP IO: no inputs - Map 5 + Map 6 Map Operator Tree: TableScan alias: y @@ -1854,17 +2913,32 @@ STAGE PLANS: Merge Join Operator condition map: Inner Join 0 to 1 - Inner Join 0 to 2 keys: 0 _col0 (type: string) 1 _col0 (type: string) - 2 _col0 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 29 Data size: 7801 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 18 Data size: 4842 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 18 Data size: 4842 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: bigint) + Reducer 3 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 28 Data size: 7532 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: hash(_col0) (type: int), hash(_col1) (type: int), hash(_col2) (type: int), hash(_col3) (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 29 Data size: 7801 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 28 Data size: 7532 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(_col0), sum(_col1), sum(_col2), sum(_col3) mode: hash @@ -1874,7 +2948,7 @@ STAGE PLANS: sort order: Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: bigint), _col3 (type: bigint) - Reducer 3 + Reducer 4 Execution mode: vectorized, llap Reduce Operator Tree: Group By Operator @@ -1889,7 +2963,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Reducer 4 + Reducer 5 Execution mode: vectorized, llap Reduce Operator Tree: Group By Operator diff --git ql/src/test/results/clientpositive/llap/correlationoptimizer4.q.out ql/src/test/results/clientpositive/llap/correlationoptimizer4.q.out index f03e3d1f41..cf07ba9749 100644 --- ql/src/test/results/clientpositive/llap/correlationoptimizer4.q.out +++ ql/src/test/results/clientpositive/llap/correlationoptimizer4.q.out @@ -220,18 +220,22 @@ POSTHOOK: Input: default@t2_n86 POSTHOOK: Input: default@t3_n34 #### A masked pattern was here #### 13 10 -PREHOOK: query: EXPLAIN +PREHOOK: query: EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT y.key AS key, count(1) AS cnt FROM T2_n86 x JOIN T1_n146 y ON (x.key = y.key) JOIN T3_n34 z ON (y.key = z.key) GROUP BY y.key) tmp PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN +POSTHOOK: query: EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT y.key AS key, count(1) AS cnt FROM T2_n86 x JOIN T1_n146 y ON (x.key = y.key) JOIN T3_n34 z ON (y.key = z.key) GROUP BY y.key) tmp POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 depends on stages: Stage-1 @@ -266,6 +270,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic 
stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 5 Map Operator Tree: TableScan @@ -286,6 +299,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 6 Map Operator Tree: TableScan @@ -306,6 +328,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -331,8 +362,17 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: true + vectorized: true Reduce Operator Tree: Group By Operator aggregations: count(VALUE._col0) @@ -355,6 +395,12 @@ STAGE PLANS: value expressions: _col0 (type: bigint), _col1 (type: bigint) Reducer 4 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0), sum(VALUE._col1) @@ -394,18 +440,22 @@ POSTHOOK: Input: default@t2_n86 POSTHOOK: Input: default@t3_n34 #### A masked pattern was here #### 13 10 -PREHOOK: query: EXPLAIN +PREHOOK: query: EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT y.key AS key, count(1) AS cnt FROM T2_n86 x JOIN T1_n146 y ON (x.key = y.key) JOIN T3_n34 z ON (y.key = z.key) GROUP BY y.key) tmp PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN +POSTHOOK: query: EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT y.key AS key, count(1) AS cnt FROM T2_n86 x JOIN T1_n146 y ON (x.key = y.key) JOIN T3_n34 z ON (y.key = z.key) GROUP BY y.key) tmp POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 depends on stages: Stage-1 @@ -460,6 +510,15 @@ STAGE PLANS: value expressions: _col1 (type: bigint) 
Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true Map 4 Map Operator Tree: TableScan @@ -480,6 +539,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 5 Map Operator Tree: TableScan @@ -500,8 +568,23 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Reducer 2 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: true + vectorized: true Reduce Operator Tree: Group By Operator aggregations: count(VALUE._col0) @@ -524,6 +607,12 @@ STAGE PLANS: value expressions: _col0 (type: bigint), _col1 (type: bigint) Reducer 3 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0), sum(VALUE._col1) @@ -563,18 +652,22 @@ POSTHOOK: Input: default@t2_n86 POSTHOOK: Input: default@t3_n34 #### A masked pattern was here #### 13 10 -PREHOOK: query: EXPLAIN +PREHOOK: query: EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT x.key AS key, count(1) AS cnt FROM T2_n86 x LEFT OUTER JOIN T1_n146 y ON (x.key = y.key) LEFT OUTER JOIN T3_n34 z ON (y.key = z.key) GROUP BY x.key) tmp PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN +POSTHOOK: query: EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT x.key AS key, count(1) AS cnt FROM T2_n86 x LEFT OUTER JOIN T1_n146 y ON (x.key = y.key) LEFT OUTER JOIN T3_n34 z ON (y.key = z.key) GROUP BY x.key) tmp POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 depends on stages: Stage-1 @@ -605,6 +698,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + 
allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 5 Map Operator Tree: TableScan @@ -621,6 +723,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 6 Map Operator Tree: TableScan @@ -637,6 +748,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -662,8 +782,17 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: true + vectorized: true Reduce Operator Tree: Group By Operator aggregations: count(VALUE._col0) @@ -686,6 +815,12 @@ STAGE PLANS: value expressions: _col0 (type: bigint), _col1 (type: bigint) Reducer 4 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0), sum(VALUE._col1) @@ -725,18 +860,22 @@ POSTHOOK: Input: default@t2_n86 POSTHOOK: Input: default@t3_n34 #### A masked pattern was here #### 22 12 -PREHOOK: query: EXPLAIN +PREHOOK: query: EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT x.key AS key, count(1) AS cnt FROM T2_n86 x LEFT OUTER JOIN T1_n146 y ON (x.key = y.key) LEFT OUTER JOIN T3_n34 z ON (y.key = z.key) GROUP BY x.key) tmp PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN +POSTHOOK: query: EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT x.key AS key, count(1) AS cnt FROM T2_n86 x LEFT OUTER JOIN T1_n146 y ON (x.key = y.key) LEFT OUTER JOIN T3_n34 z ON (y.key = z.key) GROUP BY x.key) tmp POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 depends on stages: Stage-1 @@ -767,6 +906,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + 
inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 5 Map Operator Tree: TableScan @@ -783,6 +931,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 6 Map Operator Tree: TableScan @@ -799,6 +956,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -824,8 +990,17 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: true + vectorized: true Reduce Operator Tree: Group By Operator aggregations: count(VALUE._col0) @@ -848,6 +1023,12 @@ STAGE PLANS: value expressions: _col0 (type: bigint), _col1 (type: bigint) Reducer 4 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0), sum(VALUE._col1) @@ -887,18 +1068,22 @@ POSTHOOK: Input: default@t2_n86 POSTHOOK: Input: default@t3_n34 #### A masked pattern was here #### 22 12 -PREHOOK: query: EXPLAIN +PREHOOK: query: EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT y.key AS key, count(1) AS cnt FROM T2_n86 x LEFT OUTER JOIN T1_n146 y ON (x.key = y.key) LEFT OUTER JOIN T3_n34 z ON (y.key = z.key) GROUP BY y.key) tmp PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN +POSTHOOK: query: EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT y.key AS key, count(1) AS cnt FROM T2_n86 x LEFT OUTER JOIN T1_n146 y ON (x.key = y.key) LEFT OUTER JOIN T3_n34 z ON (y.key = z.key) GROUP BY y.key) tmp POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 depends on stages: Stage-1 @@ -929,6 +1114,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: 
[DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 5 Map Operator Tree: TableScan @@ -945,6 +1139,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 6 Map Operator Tree: TableScan @@ -961,6 +1164,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -986,8 +1198,17 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: true + vectorized: true Reduce Operator Tree: Group By Operator aggregations: count(VALUE._col0) @@ -1010,6 +1231,12 @@ STAGE PLANS: value expressions: _col0 (type: bigint), _col1 (type: bigint) Reducer 4 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0), sum(VALUE._col1) @@ -1049,18 +1276,22 @@ POSTHOOK: Input: default@t2_n86 POSTHOOK: Input: default@t3_n34 #### A masked pattern was here #### 13 12 -PREHOOK: query: EXPLAIN +PREHOOK: query: EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT z.key AS key, count(1) AS cnt FROM T2_n86 x RIGHT OUTER JOIN T1_n146 y ON (x.key = y.key) RIGHT OUTER JOIN T3_n34 z ON (y.key = z.key) GROUP BY z.key) tmp PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN +POSTHOOK: query: EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT z.key AS key, count(1) AS cnt FROM T2_n86 x RIGHT OUTER JOIN T1_n146 y ON (x.key = y.key) RIGHT OUTER JOIN T3_n34 z ON (y.key = z.key) GROUP BY z.key) tmp POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 depends on stages: Stage-1 @@ -1091,6 +1322,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: 
hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 5 Map Operator Tree: TableScan @@ -1107,6 +1347,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 6 Map Operator Tree: TableScan @@ -1123,6 +1372,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -1148,8 +1406,17 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: true + vectorized: true Reduce Operator Tree: Group By Operator aggregations: count(VALUE._col0) @@ -1172,6 +1439,12 @@ STAGE PLANS: value expressions: _col0 (type: bigint), _col1 (type: bigint) Reducer 4 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0), sum(VALUE._col1) @@ -1211,18 +1484,22 @@ POSTHOOK: Input: default@t2_n86 POSTHOOK: Input: default@t3_n34 #### A masked pattern was here #### 21 12 -PREHOOK: query: EXPLAIN +PREHOOK: query: EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT z.key AS key, count(1) AS cnt FROM T2_n86 x RIGHT OUTER JOIN T1_n146 y ON (x.key = y.key) RIGHT OUTER JOIN T3_n34 z ON (y.key = z.key) GROUP BY z.key) tmp PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN +POSTHOOK: query: EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT z.key AS key, count(1) AS cnt FROM T2_n86 x RIGHT OUTER JOIN T1_n146 y ON (x.key = y.key) RIGHT OUTER JOIN T3_n34 z ON (y.key = z.key) GROUP BY z.key) tmp POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 depends on stages: Stage-1 @@ -1253,6 +1530,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no 
inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 5 Map Operator Tree: TableScan @@ -1269,6 +1555,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 6 Map Operator Tree: TableScan @@ -1285,6 +1580,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -1310,8 +1614,17 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: true + vectorized: true Reduce Operator Tree: Group By Operator aggregations: count(VALUE._col0) @@ -1334,6 +1647,12 @@ STAGE PLANS: value expressions: _col0 (type: bigint), _col1 (type: bigint) Reducer 4 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0), sum(VALUE._col1) @@ -1373,18 +1692,22 @@ POSTHOOK: Input: default@t2_n86 POSTHOOK: Input: default@t3_n34 #### A masked pattern was here #### 21 12 -PREHOOK: query: EXPLAIN +PREHOOK: query: EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT y.key AS key, count(1) AS cnt FROM T2_n86 x RIGHT OUTER JOIN T1_n146 y ON (x.key = y.key) RIGHT OUTER JOIN T3_n34 z ON (y.key = z.key) GROUP BY y.key) tmp PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN +POSTHOOK: query: EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT y.key AS key, count(1) AS cnt FROM T2_n86 x RIGHT OUTER JOIN T1_n146 y ON (x.key = y.key) RIGHT OUTER JOIN T3_n34 z ON (y.key = z.key) GROUP BY y.key) tmp POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 depends on stages: Stage-1 @@ -1415,6 +1738,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE 
Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 5 Map Operator Tree: TableScan @@ -1431,6 +1763,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 6 Map Operator Tree: TableScan @@ -1447,6 +1788,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -1472,8 +1822,17 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: true + vectorized: true Reduce Operator Tree: Group By Operator aggregations: count(VALUE._col0) @@ -1496,6 +1855,12 @@ STAGE PLANS: value expressions: _col0 (type: bigint), _col1 (type: bigint) Reducer 4 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0), sum(VALUE._col1) @@ -1535,18 +1900,22 @@ POSTHOOK: Input: default@t2_n86 POSTHOOK: Input: default@t3_n34 #### A masked pattern was here #### 21 12 -PREHOOK: query: EXPLAIN +PREHOOK: query: EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT y.key AS key, count(1) AS cnt FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER JOIN T3_n34 z ON (y.key = z.key) GROUP BY y.key) tmp PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN +POSTHOOK: query: EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT y.key AS key, count(1) AS cnt FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER JOIN T3_n34 z ON (y.key = z.key) GROUP BY y.key) tmp POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 depends on stages: Stage-1 @@ -1577,6 +1946,15 @@ STAGE PLANS: 
Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 5 Map Operator Tree: TableScan @@ -1593,6 +1971,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 6 Map Operator Tree: TableScan @@ -1609,13 +1996,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Reducer 2 Execution mode: llap Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 - Outer Join 1 to 2 + Full Outer Join 0 to 1 + Full Outer Join 1 to 2 keys: 0 _col0 (type: int) 1 _col0 (type: int) @@ -1634,8 +2030,17 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: true + vectorized: true Reduce Operator Tree: Group By Operator aggregations: count(VALUE._col0) @@ -1658,6 +2063,12 @@ STAGE PLANS: value expressions: _col0 (type: bigint), _col1 (type: bigint) Reducer 4 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0), sum(VALUE._col1) @@ -1697,18 +2108,438 @@ POSTHOOK: Input: default@t2_n86 POSTHOOK: Input: default@t3_n34 #### A masked pattern was here #### 21 14 -PREHOOK: query: EXPLAIN +PREHOOK: query: EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT y.key AS key, count(1) AS cnt FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER JOIN T3_n34 z ON (y.key = z.key) GROUP BY y.key) tmp PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN +POSTHOOK: query: EXPLAIN VECTORIZATION +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT y.key AS key, count(1) AS cnt + FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER JOIN T3_n34 z ON (y.key = z.key) + GROUP BY y.key) tmp 
+POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE), Map 6 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: x + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Map 5 + Map Operator Tree: + TableScan + alias: y + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Map 6 + Map Operator Tree: + TableScan + alias: z + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + Full Outer Join 1 to 2 + keys: + 0 _col0 (type: int) + 1 _col0 (type: int) + 2 _col0 (type: int) + outputColumnNames: _col1 + Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + keys: _col1 (type: int) + mode: hash + 
outputColumnNames: _col0, _col1 + Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: true + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: int) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: hash(_col0) (type: int), hash(_col1) (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col0), sum(_col1) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint), _col1 (type: bigint) + Reducer 4 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), sum(VALUE._col1) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT y.key AS key, count(1) AS cnt + FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER JOIN T3_n34 z ON (y.key = z.key) + GROUP BY y.key) tmp +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n146 +PREHOOK: Input: default@t2_n86 +PREHOOK: Input: default@t3_n34 +#### A masked pattern was here #### +POSTHOOK: query: SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT y.key AS key, count(1) AS cnt + FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER JOIN T3_n34 z ON (y.key = z.key) + GROUP BY y.key) tmp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n146 +POSTHOOK: Input: default@t2_n86 +POSTHOOK: Input: default@t3_n34 +#### A masked pattern was here #### +21 14 +PREHOOK: query: EXPLAIN VECTORIZATION +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT y.key AS key, count(1) AS cnt + FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER JOIN T3_n34 z ON (y.key = z.key) + GROUP BY y.key) tmp +PREHOOK: type: QUERY 
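NOTE (reviewer): in the plans above the condition map now prints "Full Outer Join 0 to 1 / Full Outer Join 1 to 2", and the MergeJoin Vectorization block reports enabled: false ("Vectorizing MergeJoin Supported IS false"), so Reducer 2 runs in row mode while Reducers 3 and 4 stay vectorized. A hedged cross-check that vectorization does not change the result (the golden output recorded here is 21 and 14 for both runs):

    -- assumes the same session and tables as above
    SET hive.vectorized.execution.enabled=false;   -- row-mode baseline
    SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt))
    FROM (SELECT y.key AS key, count(1) AS cnt
            FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key)
            FULL OUTER JOIN T3_n34 z ON (y.key = z.key)
            GROUP BY y.key) tmp;                   -- expect: 21    14
    SET hive.vectorized.execution.enabled=true;    -- vectorized run should return the same row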
+POSTHOOK: query: EXPLAIN VECTORIZATION +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT y.key AS key, count(1) AS cnt + FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER JOIN T3_n34 z ON (y.key = z.key) + GROUP BY y.key) tmp +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE), Map 6 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: x + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Map 5 + Map Operator Tree: + TableScan + alias: y + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Map 6 + Map Operator Tree: + TableScan + alias: z + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + Full Outer Join 1 to 2 + keys: + 0 _col0 
(type: int) + 1 _col0 (type: int) + 2 _col0 (type: int) + outputColumnNames: _col1 + Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: count() + keys: _col1 (type: int) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: true + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + keys: KEY._col0 (type: int) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: hash(_col0) (type: int), hash(_col1) (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col0), sum(_col1) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint), _col1 (type: bigint) + Reducer 4 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), sum(VALUE._col1) + mode: mergepartial + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT y.key AS key, count(1) AS cnt + FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER JOIN T3_n34 z ON (y.key = z.key) + GROUP BY y.key) tmp +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n146 +PREHOOK: Input: default@t2_n86 +PREHOOK: Input: default@t3_n34 +#### A masked pattern was here #### +POSTHOOK: query: SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT y.key AS key, count(1) AS cnt + FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER JOIN T3_n34 z ON (y.key = z.key) + GROUP BY y.key) tmp +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n146 +POSTHOOK: Input: default@t2_n86 +POSTHOOK: Input: default@t3_n34 +#### A masked pattern was here #### +21 14 +PREHOOK: query: EXPLAIN 
VECTORIZATION +SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) +FROM (SELECT y.key AS key, count(1) AS cnt + FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER JOIN T3_n34 z ON (y.key = z.key) + GROUP BY y.key) tmp +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION SELECT SUM(HASH(tmp.key)), SUM(HASH(tmp.cnt)) FROM (SELECT y.key AS key, count(1) AS cnt FROM T2_n86 x FULL OUTER JOIN T1_n146 y ON (x.key = y.key) FULL OUTER JOIN T3_n34 z ON (y.key = z.key) GROUP BY y.key) tmp POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 depends on stages: Stage-1 @@ -1739,6 +2570,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 5 Map Operator Tree: TableScan @@ -1755,6 +2595,15 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 6 Map Operator Tree: TableScan @@ -1771,13 +2620,22 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Reducer 2 Execution mode: llap Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 - Outer Join 1 to 2 + Full Outer Join 0 to 1 + Full Outer Join 1 to 2 keys: 0 _col0 (type: int) 1 _col0 (type: int) @@ -1796,8 +2654,17 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: true + vectorized: true Reduce Operator Tree: Group By Operator aggregations: count(VALUE._col0) @@ -1820,6 +2687,12 @@ STAGE PLANS: value expressions: _col0 (type: bigint), _col1 (type: bigint) Reducer 4 Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true Reduce Operator Tree: Group By Operator aggregations: 
sum(VALUE._col0), sum(VALUE._col1) diff --git ql/src/test/results/clientpositive/llap/cross_prod_1.q.out ql/src/test/results/clientpositive/llap/cross_prod_1.q.out index df525c3d4b..bdc9323cf9 100644 --- ql/src/test/results/clientpositive/llap/cross_prod_1.q.out +++ ql/src/test/results/clientpositive/llap/cross_prod_1.q.out @@ -1973,7 +1973,7 @@ STAGE PLANS: Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 1 diff --git ql/src/test/results/clientpositive/llap/delete_all_partitioned.q.out ql/src/test/results/clientpositive/llap/delete_all_partitioned.q.out index 90f8753687..4c1a024683 100644 --- ql/src/test/results/clientpositive/llap/delete_all_partitioned.q.out +++ ql/src/test/results/clientpositive/llap/delete_all_partitioned.q.out @@ -48,7 +48,6 @@ POSTHOOK: Input: default@acid_dap@ds=tomorrow -1070883071 0ruyd6Y50JpdGRf6HqD today -1070551679 iUR3Q today -1069736047 k17Am8uPHWk02cEf1jet today -6981 NULL tomorrow 6981 1FNNhmiFLGw425NA13g tomorrow 6981 4KhrrQ0nJ7bMNTvhSCA tomorrow 6981 K630vaVf tomorrow @@ -58,6 +57,7 @@ POSTHOOK: Input: default@acid_dap@ds=tomorrow 6981 o4lvY20511w0EOX3P3I82p63 tomorrow 6981 o5mb0QP5Y48Qd4vdB0 tomorrow 6981 sF2CRfgt2K tomorrow +6981 NULL tomorrow PREHOOK: query: delete from acid_dap PREHOOK: type: QUERY PREHOOK: Input: default@acid_dap diff --git ql/src/test/results/clientpositive/llap/explainuser_1.q.out ql/src/test/results/clientpositive/llap/explainuser_1.q.out index d99b3704c0..bc1f97dd49 100644 --- ql/src/test/results/clientpositive/llap/explainuser_1.q.out +++ ql/src/test/results/clientpositive/llap/explainuser_1.q.out @@ -2168,7 +2168,7 @@ Stage-0 Filter Operator [FIL_25] (rows=26 width=491) predicate:first_value_window_0 is not null PTF Operator [PTF_10] (rows=26 width=491) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col5 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col5 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_9] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Map 4 [SIMPLE_EDGE] llap @@ -2588,7 +2588,7 @@ Stage-0 Select Operator [SEL_4] (rows=20 width=64) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10"] PTF Operator [PTF_3] (rows=20 width=621) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col0"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col0"}] Select Operator [SEL_2] (rows=20 width=621) Output:["_col0","_col1","_col2","_col3"] <-Map 1 [SIMPLE_EDGE] llap @@ -2615,7 +2615,7 @@ Stage-0 Select Operator [SEL_4] (rows=25 width=179) Output:["_col0","_col1","_col2"] PTF Operator [PTF_3] (rows=25 width=443) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col0"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col0"}] Select Operator [SEL_2] (rows=25 width=443) Output:["_col0","_col1"] <-Map 1 [SIMPLE_EDGE] llap @@ -4101,14 +4101,14 @@ Stage-0 Select Operator [SEL_7] (rows=26 width=239) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] PTF Operator [PTF_6] (rows=26 width=499) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function 
definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_5] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Reducer 2 [SIMPLE_EDGE] llap SHUFFLE [RS_4] PartitionCols:_col2 PTF Operator [PTF_3] (rows=26 width=499) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_2] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Map 1 [SIMPLE_EDGE] llap @@ -4147,14 +4147,14 @@ Stage-0 Select Operator [SEL_14] (rows=27 width=227) Output:["_col0","_col1","_col2","_col3"] PTF Operator [PTF_13] (rows=27 width=223) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_12] (rows=27 width=223) Output:["_col1","_col2","_col5"] <-Reducer 2 [SIMPLE_EDGE] llap SHUFFLE [RS_11] PartitionCols:_col2 PTF Operator [PTF_10] (rows=27 width=223) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_9] (rows=27 width=223) Output:["_col1","_col2","_col5"] <-Map 1 [SIMPLE_EDGE] llap @@ -4209,14 +4209,14 @@ Stage-0 Select Operator [SEL_7] (rows=26 width=239) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] PTF Operator [PTF_6] (rows=26 width=499) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_5] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Reducer 2 [SIMPLE_EDGE] llap SHUFFLE [RS_4] PartitionCols:_col2 PTF Operator [PTF_3] (rows=26 width=499) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_2] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Map 1 [SIMPLE_EDGE] llap @@ -4260,14 +4260,14 @@ Stage-0 Select Operator [SEL_7] (rows=26 width=239) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"] PTF Operator [PTF_6] (rows=26 width=491) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_5] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Reducer 2 [SIMPLE_EDGE] llap SHUFFLE [RS_4] PartitionCols:_col2 PTF Operator [PTF_3] (rows=26 width=491) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_2] 
(rows=26 width=491) Output:["_col1","_col2","_col5"] <-Map 1 [SIMPLE_EDGE] llap @@ -4314,7 +4314,7 @@ Stage-0 Select Operator [SEL_12] (rows=26 width=239) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"] PTF Operator [PTF_11] (rows=26 width=223) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col0"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col0"}] Group By Operator [GBY_8] (rows=26 width=223) Output:["_col0","_col1","_col2"],keys:KEY._col0, KEY._col1, KEY._col2 <-Reducer 3 [SIMPLE_EDGE] llap @@ -4328,7 +4328,7 @@ Stage-0 Select Operator [SEL_4] (rows=26 width=491) Output:["_col1","_col2","_col5"] PTF Operator [PTF_3] (rows=26 width=491) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_2] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Map 1 [SIMPLE_EDGE] llap @@ -4374,7 +4374,7 @@ Stage-0 <-Filter Operator [FIL_14] (rows=26 width=887) predicate:_col0 is not null PTF Operator [PTF_4] (rows=26 width=887) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_3] (rows=26 width=887) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8"] <-Map 1 [SIMPLE_EDGE] llap @@ -4412,21 +4412,21 @@ Stage-0 Select Operator [SEL_8] (rows=26 width=227) Output:["_col0","_col1","_col2","_col3"] PTF Operator [PTF_7] (rows=26 width=491) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST, _col5 DESC NULLS LAST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST, _col5 DESC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_6] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Reducer 2 [SIMPLE_EDGE] llap SHUFFLE [RS_5] PartitionCols:_col2 PTF Operator [PTF_4] (rows=26 width=491) - Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col1 ASC NULLS FIRST, _col5 DESC NULLS LAST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col1 ASC NULLS LAST, _col5 DESC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_3] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Map 1 [SIMPLE_EDGE] llap SHUFFLE [RS_2] PartitionCols:p_mfgr PTF Operator [PTF_1] (rows=26 width=223) - Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"p_name ASC NULLS FIRST, p_size DESC NULLS LAST","partition by:":"p_mfgr"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"p_name ASC NULLS LAST, p_size DESC NULLS LAST","partition by:":"p_mfgr"}}] TableScan [TS_0] (rows=26 width=223) default@part,part,Tbl:COMPLETE,Col:COMPLETE,Output:["p_name","p_mfgr","p_size"] @@ -4463,21 +4463,21 @@ Stage-0 Select Operator [SEL_8] (rows=26 width=239) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] PTF Operator [PTF_7] (rows=26 width=499) - Function 
definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_6] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Reducer 2 [SIMPLE_EDGE] llap SHUFFLE [RS_5] PartitionCols:_col2 PTF Operator [PTF_4] (rows=26 width=499) - Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_3] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Map 1 [SIMPLE_EDGE] llap SHUFFLE [RS_2] PartitionCols:p_mfgr PTF Operator [PTF_1] (rows=26 width=231) - Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"p_name ASC NULLS FIRST","partition by:":"p_mfgr"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"p_name ASC NULLS LAST","partition by:":"p_mfgr"}}] TableScan [TS_0] (rows=26 width=231) default@part,part,Tbl:COMPLETE,Col:COMPLETE,Output:["p_name","p_mfgr","p_size","p_retailprice"] @@ -4514,14 +4514,14 @@ Stage-0 Select Operator [SEL_7] (rows=26 width=239) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] PTF Operator [PTF_6] (rows=26 width=499) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_5] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Reducer 2 [SIMPLE_EDGE] llap SHUFFLE [RS_4] PartitionCols:_col2 PTF Operator [PTF_3] (rows=26 width=499) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_2] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Map 1 [SIMPLE_EDGE] llap @@ -4566,23 +4566,23 @@ Stage-0 Select Operator [SEL_11] (rows=26 width=239) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] PTF Operator [PTF_10] (rows=26 width=499) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_9] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Reducer 3 [SIMPLE_EDGE] llap SHUFFLE [RS_8] PartitionCols:_col2 PTF Operator [PTF_7] (rows=26 width=499) - Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col2 DESC NULLS LAST, _col1 ASC NULLS FIRST","partition by:":"_col2"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 DESC NULLS LAST, _col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col2 DESC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 DESC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_6] (rows=26 
width=499) Output:["_col1","_col2","_col5","_col7"] <-Reducer 2 [SIMPLE_EDGE] llap SHUFFLE [RS_5] PartitionCols:_col2 PTF Operator [PTF_4] (rows=26 width=499) - Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col2 DESC NULLS LAST, _col1 ASC NULLS FIRST","partition by:":"_col2"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 DESC NULLS LAST, _col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col2 DESC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 DESC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2"}}] PTF Operator [PTF_3] (rows=26 width=499) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 DESC NULLS LAST, _col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 DESC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_2] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Map 1 [SIMPLE_EDGE] llap @@ -4632,14 +4632,14 @@ Stage-0 Select Operator [SEL_7] (rows=26 width=235) Output:["_col0","_col1","_col2","_col3"] PTF Operator [PTF_6] (rows=26 width=499) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_5] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Reducer 2 [SIMPLE_EDGE] llap SHUFFLE [RS_4] PartitionCols:_col2 PTF Operator [PTF_3] (rows=26 width=499) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_2] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Map 1 [SIMPLE_EDGE] llap @@ -4687,7 +4687,7 @@ Stage-0 Select Operator [SEL_13] (rows=27 width=259) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8"] PTF Operator [PTF_12] (rows=27 width=767) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_11] (rows=27 width=767) Output:["_col1","_col2","_col5","_col7"] <-Reducer 2 [SIMPLE_EDGE] llap @@ -4705,7 +4705,7 @@ Stage-0 <-Filter Operator [FIL_18] (rows=26 width=503) predicate:_col0 is not null PTF Operator [PTF_4] (rows=26 width=503) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_3] (rows=26 width=503) Output:["_col0","_col1","_col2","_col5","_col7"] <-Map 1 [SIMPLE_EDGE] llap @@ -4845,14 +4845,14 @@ Stage-4 Select Operator [SEL_7] (rows=26 width=239) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] PTF Operator [PTF_6] (rows=26 width=499) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC 
NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_5] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Reducer 2 [SIMPLE_EDGE] llap SHUFFLE [RS_4] PartitionCols:_col2 PTF Operator [PTF_3] (rows=26 width=499) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_2] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Map 1 [SIMPLE_EDGE] llap @@ -4879,7 +4879,7 @@ Stage-4 Select Operator [SEL_25] (rows=26 width=247) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7"] PTF Operator [PTF_24] (rows=26 width=499) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col3 ASC NULLS FIRST, _col2 ASC NULLS FIRST","partition by:":"_col3"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col3 ASC NULLS LAST, _col2 ASC NULLS LAST","partition by:":"_col3"}] Select Operator [SEL_23] (rows=26 width=499) Output:["_col0","_col2","_col3","_col6"] <-Reducer 6 [SIMPLE_EDGE] llap @@ -4888,7 +4888,7 @@ Stage-4 Select Operator [SEL_21] (rows=26 width=491) Output:["sum_window_0","_col1","_col2","_col5"] PTF Operator [PTF_20] (rows=26 width=491) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col5 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col5 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_19] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Reducer 2 [SIMPLE_EDGE] llap @@ -4964,16 +4964,16 @@ Stage-0 SHUFFLE [RS_8] PartitionCols:_col2, _col1 PTF Operator [PTF_7] (rows=26 width=491) - Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST","partition by:":"_col2, _col1"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST","partition by:":"_col2, _col1"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col2 ASC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2, _col1"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2, _col1"}}] Select Operator [SEL_6] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Reducer 2 [SIMPLE_EDGE] llap SHUFFLE [RS_5] PartitionCols:_col2, _col1 PTF Operator [PTF_4] (rows=26 width=491) - Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST","partition by:":"_col2, _col1"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST","partition by:":"_col2, _col1"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col2 ASC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2, _col1"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2, _col1"}}] PTF Operator [PTF_3] (rows=26 width=491) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST","partition 
by:":"_col2"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST","partition by:":"_col2"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_2] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Map 1 [SIMPLE_EDGE] llap @@ -5033,28 +5033,28 @@ Stage-0 Select Operator [SEL_13] (rows=26 width=239) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] PTF Operator [PTF_12] (rows=26 width=491) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_11] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Reducer 4 [SIMPLE_EDGE] llap SHUFFLE [RS_10] PartitionCols:_col2 PTF Operator [PTF_9] (rows=26 width=491) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_8] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Reducer 3 [SIMPLE_EDGE] llap SHUFFLE [RS_7] PartitionCols:_col2 PTF Operator [PTF_6] (rows=26 width=491) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST","partition by:":"_col2, _col1"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2, _col1"}}] Select Operator [SEL_5] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Reducer 2 [SIMPLE_EDGE] llap SHUFFLE [RS_4] PartitionCols:_col2, _col1 PTF Operator [PTF_3] (rows=26 width=491) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST","partition by:":"_col2"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST","partition by:":"_col2"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_2] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Map 1 [SIMPLE_EDGE] llap @@ -5109,21 +5109,21 @@ Stage-0 Select Operator [SEL_10] (rows=26 width=239) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] PTF Operator [PTF_9] (rows=26 width=491) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_8] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Reducer 3 [SIMPLE_EDGE] llap SHUFFLE [RS_7] PartitionCols:_col2 PTF Operator [PTF_6] (rows=26 width=491) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST","partition by:":"_col2"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table 
definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST","partition by:":"_col2"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_5] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Reducer 2 [SIMPLE_EDGE] llap SHUFFLE [RS_4] PartitionCols:_col2 PTF Operator [PTF_3] (rows=26 width=491) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST","partition by:":"_col2, _col1"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST","partition by:":"_col2, _col1"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2, _col1"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2, _col1"}}] Select Operator [SEL_2] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Map 1 [SIMPLE_EDGE] llap diff --git ql/src/test/results/clientpositive/llap/explainuser_4.q.out ql/src/test/results/clientpositive/llap/explainuser_4.q.out index c650698d19..aa65d4f8f7 100644 --- ql/src/test/results/clientpositive/llap/explainuser_4.q.out +++ ql/src/test/results/clientpositive/llap/explainuser_4.q.out @@ -270,7 +270,7 @@ Stage-0 <-Reducer 2 [SIMPLE_EDGE] vectorized, llap SHUFFLE [RS_36] Map Join Operator [MAPJOIN_35] (rows=1501 width=236) - Conds:RS_31.KEY.reducesinkkey0=RS_34.KEY.reducesinkkey0(Inner),HybridGraceHashJoin:true,Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21","_col22","_col23"] + Conds:RS_31.KEY.reducesinkkey0=RS_34.KEY.reducesinkkey0(Inner),DynamicPartitionHashJoin:true,HybridGraceHashJoin:true,Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21","_col22","_col23"] <-Map 4 [CUSTOM_SIMPLE_EDGE] vectorized, llap PARTITION_ONLY_SHUFFLE [RS_34] PartitionCols:_col2 @@ -351,7 +351,7 @@ Stage-0 Group By Operator [GBY_38] (rows=1 width=8) Output:["_col0"],aggregations:["count()"] Map Join Operator [MAPJOIN_37] (rows=1501 width=236) - Conds:RS_33.KEY.reducesinkkey0=RS_36.KEY.reducesinkkey0(Inner),HybridGraceHashJoin:true + Conds:RS_33.KEY.reducesinkkey0=RS_36.KEY.reducesinkkey0(Inner),DynamicPartitionHashJoin:true,HybridGraceHashJoin:true <-Map 4 [CUSTOM_SIMPLE_EDGE] vectorized, llap PARTITION_ONLY_SHUFFLE [RS_36] PartitionCols:_col0 @@ -431,7 +431,7 @@ Stage-0 Group By Operator [GBY_40] (rows=1501 width=236) Output:["_col0","_col1"],aggregations:["count()"],keys:_col0 Map Join Operator [MAPJOIN_39] (rows=1501 width=236) - Conds:RS_35.KEY.reducesinkkey0=RS_38.KEY.reducesinkkey0(Inner),HybridGraceHashJoin:true,Output:["_col0"] + Conds:RS_35.KEY.reducesinkkey0=RS_38.KEY.reducesinkkey0(Inner),DynamicPartitionHashJoin:true,HybridGraceHashJoin:true,Output:["_col0"] <-Map 5 [CUSTOM_SIMPLE_EDGE] vectorized, llap PARTITION_ONLY_SHUFFLE [RS_38] PartitionCols:_col0 diff --git ql/src/test/results/clientpositive/llap/fullouter_mapjoin_1_optimized.q.out ql/src/test/results/clientpositive/llap/fullouter_mapjoin_1_optimized.q.out new file mode 100644 index 0000000000..c387af5db2 --- /dev/null +++ 
ql/src/test/results/clientpositive/llap/fullouter_mapjoin_1_optimized.q.out @@ -0,0 +1,3139 @@ +PREHOOK: query: CREATE TABLE fullouter_long_big_1a_txt(key bigint) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1a_txt +POSTHOOK: query: CREATE TABLE fullouter_long_big_1a_txt(key bigint) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1a_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_big_1a_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_big_1a_txt +PREHOOK: query: CREATE TABLE fullouter_long_big_1a STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_long_big_1a_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1a +POSTHOOK: query: CREATE TABLE fullouter_long_big_1a STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_long_big_1a_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1a +POSTHOOK: Lineage: fullouter_long_big_1a.key SIMPLE [(fullouter_long_big_1a_txt)fullouter_long_big_1a_txt.FieldSchema(name:key, type:bigint, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_long_big_1a_nonull_txt(key bigint) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1a_nonull_txt +POSTHOOK: query: CREATE TABLE fullouter_long_big_1a_nonull_txt(key bigint) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1a_nonull_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_nonull_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_big_1a_nonull_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_nonull_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_big_1a_nonull_txt +PREHOOK: query: CREATE TABLE fullouter_long_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_nonull_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_long_big_1a_nonull_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1a_nonull +POSTHOOK: query: CREATE TABLE fullouter_long_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_nonull_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_long_big_1a_nonull_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1a_nonull +POSTHOOK: Lineage: fullouter_long_big_1a_nonull.key SIMPLE 
[(fullouter_long_big_1a_nonull_txt)fullouter_long_big_1a_nonull_txt.FieldSchema(name:key, type:bigint, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_long_small_1a_txt(key bigint, s_date date) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1a_txt +POSTHOOK: query: CREATE TABLE fullouter_long_small_1a_txt(key bigint, s_date date) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1a_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_small_1a_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_small_1a_txt +PREHOOK: query: CREATE TABLE fullouter_long_small_1a STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_long_small_1a_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1a +POSTHOOK: query: CREATE TABLE fullouter_long_small_1a STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_long_small_1a_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1a +POSTHOOK: Lineage: fullouter_long_small_1a.key SIMPLE [(fullouter_long_small_1a_txt)fullouter_long_small_1a_txt.FieldSchema(name:key, type:bigint, comment:null), ] +POSTHOOK: Lineage: fullouter_long_small_1a.s_date SIMPLE [(fullouter_long_small_1a_txt)fullouter_long_small_1a_txt.FieldSchema(name:s_date, type:date, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_long_small_1a_nonull_txt(key bigint, s_date date) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1a_nonull_txt +POSTHOOK: query: CREATE TABLE fullouter_long_small_1a_nonull_txt(key bigint, s_date date) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1a_nonull_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_nonull_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_small_1a_nonull_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_nonull_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_small_1a_nonull_txt +PREHOOK: query: CREATE TABLE fullouter_long_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_nonull_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_long_small_1a_nonull_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1a_nonull +POSTHOOK: query: CREATE TABLE fullouter_long_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_nonull_txt 
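NOTE (reviewer): the new fullouter_mapjoin_1_optimized.q.out repeats one staging pattern four times (big/small inputs, with and without NULL keys): load delimited text into a *_txt table, convert it to ORC via CTAS, then gather table and column statistics so the planner can size the FULL OUTER join. Condensed sketch of the pattern, using names taken from this file:

    -- one instance of the staging pattern used by this test
    CREATE TABLE fullouter_long_small_1a_txt(key bigint, s_date date)
      ROW FORMAT DELIMITED FIELDS TERMINATED BY ',';
    LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a.txt'
      OVERWRITE INTO TABLE fullouter_long_small_1a_txt;
    CREATE TABLE fullouter_long_small_1a STORED AS ORC
      AS SELECT * FROM fullouter_long_small_1a_txt;
    ANALYZE TABLE fullouter_long_small_1a COMPUTE STATISTICS;
    ANALYZE TABLE fullouter_long_small_1a COMPUTE STATISTICS FOR COLUMNS;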
+POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_long_small_1a_nonull_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1a_nonull +POSTHOOK: Lineage: fullouter_long_small_1a_nonull.key SIMPLE [(fullouter_long_small_1a_nonull_txt)fullouter_long_small_1a_nonull_txt.FieldSchema(name:key, type:bigint, comment:null), ] +POSTHOOK: Lineage: fullouter_long_small_1a_nonull.s_date SIMPLE [(fullouter_long_small_1a_nonull_txt)fullouter_long_small_1a_nonull_txt.FieldSchema(name:s_date, type:date, comment:null), ] +PREHOOK: query: analyze table fullouter_long_big_1a compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a +PREHOOK: Output: default@fullouter_long_big_1a +POSTHOOK: query: analyze table fullouter_long_big_1a compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a +POSTHOOK: Output: default@fullouter_long_big_1a +PREHOOK: query: analyze table fullouter_long_big_1a compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_big_1a +PREHOOK: Output: default@fullouter_long_big_1a +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_big_1a compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_big_1a +POSTHOOK: Output: default@fullouter_long_big_1a +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_long_big_1a_nonull compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a_nonull +PREHOOK: Output: default@fullouter_long_big_1a_nonull +POSTHOOK: query: analyze table fullouter_long_big_1a_nonull compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a_nonull +POSTHOOK: Output: default@fullouter_long_big_1a_nonull +PREHOOK: query: analyze table fullouter_long_big_1a_nonull compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_big_1a_nonull +PREHOOK: Output: default@fullouter_long_big_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_big_1a_nonull compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_big_1a_nonull +POSTHOOK: Output: default@fullouter_long_big_1a_nonull +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_long_small_1a compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_small_1a +PREHOOK: Output: default@fullouter_long_small_1a +POSTHOOK: query: analyze table fullouter_long_small_1a compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_small_1a +POSTHOOK: Output: default@fullouter_long_small_1a +PREHOOK: query: analyze table fullouter_long_small_1a compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_small_1a +PREHOOK: Output: default@fullouter_long_small_1a +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_small_1a compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_small_1a +POSTHOOK: Output: default@fullouter_long_small_1a +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_long_small_1a_nonull compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_small_1a_nonull +PREHOOK: Output: default@fullouter_long_small_1a_nonull +POSTHOOK: query: analyze 
table fullouter_long_small_1a_nonull compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_small_1a_nonull +POSTHOOK: Output: default@fullouter_long_small_1a_nonull +PREHOOK: query: analyze table fullouter_long_small_1a_nonull compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_small_1a_nonull +PREHOOK: Output: default@fullouter_long_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_small_1a_nonull compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_small_1a_nonull +POSTHOOK: Output: default@fullouter_long_small_1a_nonull +#### A masked pattern was here #### +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: bigint) + outputColumnNames: _col0 + Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Map-reduce partition columns: _col0 (type: bigint) + Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE + Execution mode: llap + LLAP IO: all inputs + Map 4 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: bigint), s_date (type: date) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Map-reduce partition columns: _col0 (type: bigint) + Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: date) + Execution mode: llap + LLAP IO: all inputs + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col0 (type: bigint) + 1 _col0 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: date) + Reducer 3 + Execution mode: llap + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: bigint), VALUE._col1 (type: date) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 59 Data size: 
3775 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a +PREHOOK: Input: default@fullouter_long_small_1a +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a +POSTHOOK: Input: default@fullouter_long_small_1a +#### A masked pattern was here #### +-5206670856103795573 NULL NULL +-5310365297525168078 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-8460550397108077433 NULL NULL +1569543799237464101 NULL NULL +3313583664488247651 NULL NULL +968819023021777205 NULL NULL +NULL -1339636982994067311 2000-06-20 +NULL -1339636982994067311 2008-12-03 +NULL -2098090254092150988 1817-03-12 +NULL -2098090254092150988 2163-05-26 +NULL -2098090254092150988 2219-12-23 +NULL -2184423060953067642 1853-07-06 +NULL -2184423060953067642 1880-10-06 +NULL -2575185053386712613 1809-07-12 +NULL -2575185053386712613 2105-01-21 +NULL -2688622006344936758 1948-10-15 +NULL -2688622006344936758 2129-01-11 +NULL -327698348664467755 2222-10-15 +NULL -3655445881497026796 2108-08-16 +NULL -4224290881682877258 1813-05-17 +NULL -4224290881682877258 2120-01-16 +NULL -4224290881682877258 2185-07-08 +NULL -4961171400048338491 2196-08-10 +NULL -5706981533666803767 1800-09-20 +NULL -5706981533666803767 2151-06-09 +NULL -5754527700632192146 1958-07-15 +NULL -614848861623872247 2101-05-25 +NULL -614848861623872247 2112-11-09 +NULL -6784441713807772877 1845-02-16 +NULL -6784441713807772877 2054-06-17 +NULL -7707546703881534780 2134-08-20 +NULL 214451696109242839 1855-05-12 +NULL 214451696109242839 1977-01-04 +NULL 214451696109242839 2179-04-18 +NULL 2438535236662373438 1881-09-16 +NULL 2438535236662373438 1916-01-10 +NULL 2438535236662373438 2026-06-23 +NULL 3845554233155411208 1805-11-10 +NULL 3845554233155411208 2264-04-05 +NULL 3873405809071478736 1918-11-20 +NULL 3873405809071478736 2034-06-09 +NULL 3873405809071478736 2164-04-23 +NULL 3905351789241845882 1866-07-28 +NULL 3905351789241845882 2045-12-05 +NULL 434940853096155515 2275-02-08 +NULL 4436884039838843341 2031-05-23 +NULL 5246983111579595707 1817-07-01 +NULL 5246983111579595707 2260-05-11 +NULL 5252407779338300447 2039-03-10 +NULL 5252407779338300447 2042-04-26 +NULL 6049335087268933751 2086-12-17 +NULL 6049335087268933751 2282-06-09 +NULL 7297177530102477725 1921-05-11 +NULL 7297177530102477725 1926-04-12 +NULL 7297177530102477725 2125-08-26 +NULL 7937120928560087303 2083-03-14 +NULL 8755921538765428593 1827-05-01 +NULL NULL 2024-01-23 +NULL NULL 2098-02-10 +NULL NULL 2242-02-08 +NULL NULL NULL +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by 
b.key +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: bigint) + outputColumnNames: _col0 + Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Map-reduce partition columns: _col0 (type: bigint) + Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE + Execution mode: llap + LLAP IO: all inputs + Map 2 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: bigint), s_date (type: date) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Map-reduce partition columns: _col0 (type: bigint) + Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: date) + Execution mode: llap + LLAP IO: all inputs + Reducer 3 + Execution mode: llap + Reduce Operator Tree: + Map Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: bigint) + 1 KEY.reducesinkkey0 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + input vertices: + 0 Map 1 + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: date) + Reducer 4 + Execution mode: llap + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: bigint), VALUE._col1 (type: date) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a +PREHOOK: Input: default@fullouter_long_small_1a +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date FROM 
fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a +POSTHOOK: Input: default@fullouter_long_small_1a +#### A masked pattern was here #### +-5206670856103795573 NULL NULL +-5310365297525168078 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-8460550397108077433 NULL NULL +1569543799237464101 NULL NULL +3313583664488247651 NULL NULL +968819023021777205 NULL NULL +NULL -1339636982994067311 2000-06-20 +NULL -1339636982994067311 2008-12-03 +NULL -2098090254092150988 1817-03-12 +NULL -2098090254092150988 2163-05-26 +NULL -2098090254092150988 2219-12-23 +NULL -2184423060953067642 1853-07-06 +NULL -2184423060953067642 1880-10-06 +NULL -2575185053386712613 1809-07-12 +NULL -2575185053386712613 2105-01-21 +NULL -2688622006344936758 1948-10-15 +NULL -2688622006344936758 2129-01-11 +NULL -327698348664467755 2222-10-15 +NULL -3655445881497026796 2108-08-16 +NULL -4224290881682877258 1813-05-17 +NULL -4224290881682877258 2120-01-16 +NULL -4224290881682877258 2185-07-08 +NULL -4961171400048338491 2196-08-10 +NULL -5706981533666803767 1800-09-20 +NULL -5706981533666803767 2151-06-09 +NULL -5754527700632192146 1958-07-15 +NULL -614848861623872247 2101-05-25 +NULL -614848861623872247 2112-11-09 +NULL -6784441713807772877 1845-02-16 +NULL -6784441713807772877 2054-06-17 +NULL -7707546703881534780 2134-08-20 +NULL 214451696109242839 1855-05-12 +NULL 214451696109242839 1977-01-04 +NULL 214451696109242839 2179-04-18 +NULL 2438535236662373438 1881-09-16 +NULL 2438535236662373438 1916-01-10 +NULL 2438535236662373438 2026-06-23 +NULL 3845554233155411208 1805-11-10 +NULL 3845554233155411208 2264-04-05 +NULL 3873405809071478736 1918-11-20 +NULL 3873405809071478736 2034-06-09 +NULL 3873405809071478736 2164-04-23 +NULL 3905351789241845882 1866-07-28 +NULL 3905351789241845882 2045-12-05 +NULL 434940853096155515 2275-02-08 +NULL 4436884039838843341 2031-05-23 +NULL 5246983111579595707 1817-07-01 +NULL 5246983111579595707 2260-05-11 +NULL 5252407779338300447 2039-03-10 +NULL 5252407779338300447 2042-04-26 +NULL 6049335087268933751 2086-12-17 +NULL 6049335087268933751 2282-06-09 +NULL 7297177530102477725 1921-05-11 +NULL 7297177530102477725 1926-04-12 +NULL 7297177530102477725 2125-08-26 +NULL 7937120928560087303 2083-03-14 +NULL 8755921538765428593 1827-05-01 +NULL NULL 2024-01-23 +NULL NULL 2098-02-10 +NULL NULL 2242-02-08 +NULL NULL NULL +PREHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a_nonull +PREHOOK: Input: default@fullouter_long_small_1a +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a_nonull +POSTHOOK: Input: default@fullouter_long_small_1a +#### A masked pattern was here #### +-5206670856103795573 NULL NULL +-5310365297525168078 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-8460550397108077433 NULL NULL +1569543799237464101 NULL NULL +3313583664488247651 NULL NULL +968819023021777205 NULL NULL +NULL -1339636982994067311 
2000-06-20 +NULL -1339636982994067311 2008-12-03 +NULL -2098090254092150988 1817-03-12 +NULL -2098090254092150988 2163-05-26 +NULL -2098090254092150988 2219-12-23 +NULL -2184423060953067642 1853-07-06 +NULL -2184423060953067642 1880-10-06 +NULL -2575185053386712613 1809-07-12 +NULL -2575185053386712613 2105-01-21 +NULL -2688622006344936758 1948-10-15 +NULL -2688622006344936758 2129-01-11 +NULL -327698348664467755 2222-10-15 +NULL -3655445881497026796 2108-08-16 +NULL -4224290881682877258 1813-05-17 +NULL -4224290881682877258 2120-01-16 +NULL -4224290881682877258 2185-07-08 +NULL -4961171400048338491 2196-08-10 +NULL -5706981533666803767 1800-09-20 +NULL -5706981533666803767 2151-06-09 +NULL -5754527700632192146 1958-07-15 +NULL -614848861623872247 2101-05-25 +NULL -614848861623872247 2112-11-09 +NULL -6784441713807772877 1845-02-16 +NULL -6784441713807772877 2054-06-17 +NULL -7707546703881534780 2134-08-20 +NULL 214451696109242839 1855-05-12 +NULL 214451696109242839 1977-01-04 +NULL 214451696109242839 2179-04-18 +NULL 2438535236662373438 1881-09-16 +NULL 2438535236662373438 1916-01-10 +NULL 2438535236662373438 2026-06-23 +NULL 3845554233155411208 1805-11-10 +NULL 3845554233155411208 2264-04-05 +NULL 3873405809071478736 1918-11-20 +NULL 3873405809071478736 2034-06-09 +NULL 3873405809071478736 2164-04-23 +NULL 3905351789241845882 1866-07-28 +NULL 3905351789241845882 2045-12-05 +NULL 434940853096155515 2275-02-08 +NULL 4436884039838843341 2031-05-23 +NULL 5246983111579595707 1817-07-01 +NULL 5246983111579595707 2260-05-11 +NULL 5252407779338300447 2039-03-10 +NULL 5252407779338300447 2042-04-26 +NULL 6049335087268933751 2086-12-17 +NULL 6049335087268933751 2282-06-09 +NULL 7297177530102477725 1921-05-11 +NULL 7297177530102477725 1926-04-12 +NULL 7297177530102477725 2125-08-26 +NULL 7937120928560087303 2083-03-14 +NULL 8755921538765428593 1827-05-01 +NULL NULL 2024-01-23 +NULL NULL 2098-02-10 +NULL NULL 2242-02-08 +PREHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a +PREHOOK: Input: default@fullouter_long_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a +POSTHOOK: Input: default@fullouter_long_small_1a_nonull +#### A masked pattern was here #### +-5206670856103795573 NULL NULL +-5310365297525168078 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-8460550397108077433 NULL NULL +1569543799237464101 NULL NULL +3313583664488247651 NULL NULL +968819023021777205 NULL NULL +NULL -1339636982994067311 2000-06-20 +NULL -1339636982994067311 2008-12-03 +NULL -2098090254092150988 1817-03-12 +NULL -2098090254092150988 2163-05-26 +NULL -2098090254092150988 2219-12-23 +NULL -2184423060953067642 1853-07-06 +NULL -2184423060953067642 1880-10-06 +NULL -2575185053386712613 1809-07-12 +NULL -2575185053386712613 2105-01-21 +NULL -2688622006344936758 1948-10-15 +NULL -2688622006344936758 2129-01-11 +NULL -327698348664467755 2222-10-15 +NULL -3655445881497026796 2108-08-16 +NULL -4224290881682877258 1813-05-17 +NULL -4224290881682877258 2120-01-16 +NULL -4224290881682877258 2185-07-08 +NULL -4961171400048338491 2196-08-10 +NULL 
-5706981533666803767 1800-09-20 +NULL -5706981533666803767 2151-06-09 +NULL -5754527700632192146 1958-07-15 +NULL -614848861623872247 2101-05-25 +NULL -614848861623872247 2112-11-09 +NULL -6784441713807772877 1845-02-16 +NULL -6784441713807772877 2054-06-17 +NULL -7707546703881534780 2134-08-20 +NULL 214451696109242839 1855-05-12 +NULL 214451696109242839 1977-01-04 +NULL 214451696109242839 2179-04-18 +NULL 2438535236662373438 1881-09-16 +NULL 2438535236662373438 1916-01-10 +NULL 2438535236662373438 2026-06-23 +NULL 3845554233155411208 1805-11-10 +NULL 3845554233155411208 2264-04-05 +NULL 3873405809071478736 1918-11-20 +NULL 3873405809071478736 2034-06-09 +NULL 3873405809071478736 2164-04-23 +NULL 3905351789241845882 1866-07-28 +NULL 3905351789241845882 2045-12-05 +NULL 434940853096155515 2275-02-08 +NULL 4436884039838843341 2031-05-23 +NULL 5246983111579595707 1817-07-01 +NULL 5246983111579595707 2260-05-11 +NULL 5252407779338300447 2039-03-10 +NULL 5252407779338300447 2042-04-26 +NULL 6049335087268933751 2086-12-17 +NULL 6049335087268933751 2282-06-09 +NULL 7297177530102477725 1921-05-11 +NULL 7297177530102477725 1926-04-12 +NULL 7297177530102477725 2125-08-26 +NULL 7937120928560087303 2083-03-14 +NULL 8755921538765428593 1827-05-01 +NULL NULL NULL +PREHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a_nonull +PREHOOK: Input: default@fullouter_long_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a_nonull +POSTHOOK: Input: default@fullouter_long_small_1a_nonull +#### A masked pattern was here #### +-5206670856103795573 NULL NULL +-5310365297525168078 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-8460550397108077433 NULL NULL +1569543799237464101 NULL NULL +3313583664488247651 NULL NULL +968819023021777205 NULL NULL +NULL -1339636982994067311 2000-06-20 +NULL -1339636982994067311 2008-12-03 +NULL -2098090254092150988 1817-03-12 +NULL -2098090254092150988 2163-05-26 +NULL -2098090254092150988 2219-12-23 +NULL -2184423060953067642 1853-07-06 +NULL -2184423060953067642 1880-10-06 +NULL -2575185053386712613 1809-07-12 +NULL -2575185053386712613 2105-01-21 +NULL -2688622006344936758 1948-10-15 +NULL -2688622006344936758 2129-01-11 +NULL -327698348664467755 2222-10-15 +NULL -3655445881497026796 2108-08-16 +NULL -4224290881682877258 1813-05-17 +NULL -4224290881682877258 2120-01-16 +NULL -4224290881682877258 2185-07-08 +NULL -4961171400048338491 2196-08-10 +NULL -5706981533666803767 1800-09-20 +NULL -5706981533666803767 2151-06-09 +NULL -5754527700632192146 1958-07-15 +NULL -614848861623872247 2101-05-25 +NULL -614848861623872247 2112-11-09 +NULL -6784441713807772877 1845-02-16 +NULL -6784441713807772877 2054-06-17 +NULL -7707546703881534780 2134-08-20 +NULL 214451696109242839 1855-05-12 +NULL 214451696109242839 1977-01-04 +NULL 214451696109242839 2179-04-18 +NULL 2438535236662373438 1881-09-16 +NULL 2438535236662373438 1916-01-10 +NULL 2438535236662373438 2026-06-23 +NULL 3845554233155411208 1805-11-10 +NULL 3845554233155411208 2264-04-05 +NULL 3873405809071478736 1918-11-20 +NULL 
3873405809071478736 2034-06-09 +NULL 3873405809071478736 2164-04-23 +NULL 3905351789241845882 1866-07-28 +NULL 3905351789241845882 2045-12-05 +NULL 434940853096155515 2275-02-08 +NULL 4436884039838843341 2031-05-23 +NULL 5246983111579595707 1817-07-01 +NULL 5246983111579595707 2260-05-11 +NULL 5252407779338300447 2039-03-10 +NULL 5252407779338300447 2042-04-26 +NULL 6049335087268933751 2086-12-17 +NULL 6049335087268933751 2282-06-09 +NULL 7297177530102477725 1921-05-11 +NULL 7297177530102477725 1926-04-12 +NULL 7297177530102477725 2125-08-26 +NULL 7937120928560087303 2083-03-14 +NULL 8755921538765428593 1827-05-01 +PREHOOK: query: CREATE TABLE fullouter_long_big_1b(key smallint) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1b +POSTHOOK: query: CREATE TABLE fullouter_long_big_1b(key smallint) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1b +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1b.txt' OVERWRITE INTO TABLE fullouter_long_big_1b +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_big_1b +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1b.txt' OVERWRITE INTO TABLE fullouter_long_big_1b +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_big_1b +PREHOOK: query: CREATE TABLE fullouter_long_small_1b(key smallint, s_timestamp timestamp) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1b +POSTHOOK: query: CREATE TABLE fullouter_long_small_1b(key smallint, s_timestamp timestamp) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1b +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1b.txt' OVERWRITE INTO TABLE fullouter_long_small_1b +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_small_1b +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1b.txt' OVERWRITE INTO TABLE fullouter_long_small_1b +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_small_1b +PREHOOK: query: analyze table fullouter_long_big_1b compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1b +PREHOOK: Output: default@fullouter_long_big_1b +POSTHOOK: query: analyze table fullouter_long_big_1b compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1b +POSTHOOK: Output: default@fullouter_long_big_1b +PREHOOK: query: analyze table fullouter_long_big_1b compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_big_1b +PREHOOK: Output: default@fullouter_long_big_1b +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_big_1b compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_big_1b +POSTHOOK: Output: default@fullouter_long_big_1b +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_long_small_1b compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: 
default@fullouter_long_small_1b +PREHOOK: Output: default@fullouter_long_small_1b +POSTHOOK: query: analyze table fullouter_long_small_1b compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_small_1b +POSTHOOK: Output: default@fullouter_long_small_1b +PREHOOK: query: analyze table fullouter_long_small_1b compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_small_1b +PREHOOK: Output: default@fullouter_long_small_1b +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_small_1b compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_small_1b +POSTHOOK: Output: default@fullouter_long_small_1b +#### A masked pattern was here #### +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 13 Data size: 63 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: smallint) + outputColumnNames: _col0 + Statistics: Num rows: 13 Data size: 63 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: smallint) + sort order: + + Map-reduce partition columns: _col0 (type: smallint) + Statistics: Num rows: 13 Data size: 63 Basic stats: COMPLETE Column stats: NONE + Execution mode: llap + LLAP IO: no inputs + Map 2 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 72 Data size: 2208 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: smallint), s_timestamp (type: timestamp) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 72 Data size: 2208 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: smallint) + sort order: + + Map-reduce partition columns: _col0 (type: smallint) + Statistics: Num rows: 72 Data size: 2208 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: timestamp) + Execution mode: llap + LLAP IO: no inputs + Reducer 3 + Execution mode: llap + Reduce Operator Tree: + Map Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: smallint) + 1 KEY.reducesinkkey0 (type: smallint) + outputColumnNames: _col0, _col1, _col2 + input vertices: + 0 Map 1 + Statistics: Num rows: 79 Data size: 2428 Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: smallint) + sort order: + + Statistics: Num rows: 79 Data size: 2428 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: smallint), _col2 (type: timestamp) + Reducer 4 + Execution mode: llap + Reduce Operator Tree: + 
Select Operator + expressions: KEY.reducesinkkey0 (type: smallint), VALUE._col0 (type: smallint), VALUE._col1 (type: timestamp) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 79 Data size: 2428 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 79 Data size: 2428 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1b +PREHOOK: Input: default@fullouter_long_small_1b +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1b +POSTHOOK: Input: default@fullouter_long_small_1b +#### A masked pattern was here #### +-25394 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +32030 32030 2101-09-09 07:35:05.145 +NULL -14172 1918-09-13 11:44:24.496926711 +NULL -14172 2355-01-14 23:23:34 +NULL -14172 2809-06-07 02:10:58 +NULL -15361 2219-09-15 20:15:03.000169887 +NULL -15361 2434-08-13 20:37:07.000172979 +NULL -15427 2023-11-09 19:31:21 +NULL -15427 2046-06-07 22:58:40.728 +NULL -15427 2355-01-08 12:34:11.617 +NULL -19167 2230-12-22 20:25:39.000242111 +NULL -19167 2319-08-26 11:07:11.268 +NULL -20517 2233-12-20 04:06:56.666522799 +NULL -20517 2774-06-23 12:04:06.5 +NULL -20824 2478-11-05 00:28:05 +NULL -22422 1949-03-13 00:07:53.075 +NULL -22422 2337-07-19 06:33:02.000353352 +NULL -22422 2982-12-28 06:30:26.000883228 +NULL -23117 2037-01-05 21:52:30.685952759 +NULL -24775 2035-03-26 08:11:23.375224153 +NULL -24775 2920-08-06 15:58:28.261059449 +NULL -26998 2268-08-04 12:48:11.848006292 +NULL -26998 2428-12-26 07:53:45.96925825 +NULL -26998 2926-07-18 09:02:46.077 +NULL -29600 2333-11-02 15:06:30 +NULL -30059 2269-05-04 21:23:44.000339209 +NULL -30059 2420-12-10 22:12:30 +NULL -30059 2713-10-13 09:28:49 +NULL -30306 2619-05-24 10:35:58.000774018 +NULL -4279 2214-09-10 03:53:06 +NULL -4279 2470-08-12 11:21:14.000955747 +NULL -7373 2662-10-28 12:07:02.000526564 +NULL -7624 2219-12-03 17:07:19 +NULL -7624 2289-08-28 00:14:34 +NULL -7624 2623-03-20 03:18:45.00006465 +NULL -8087 2550-06-26 23:57:42.588007617 +NULL -8087 2923-07-02 11:40:26.115 +NULL -8435 2642-02-07 11:45:04.353231638 +NULL -8435 2834-12-06 16:38:18.901 +NULL -8624 2120-02-15 15:36:40.000758423 +NULL -8624 2282-03-28 07:58:16 +NULL -8624 2644-05-04 04:45:07.839 +NULL 10553 2168-05-05 21:10:59.000152113 +NULL 11232 2038-04-06 14:53:59 +NULL 11232 2507-01-27 22:04:22.49661421 +NULL 11232 2533-11-26 12:22:18 +NULL 13598 2421-05-20 14:18:31.000264698 +NULL 13598 2909-06-25 23:22:50 +NULL 14865 2079-10-06 16:54:35.117 +NULL 14865 2220-02-28 03:41:36 +NULL 14865 2943-03-21 00:42:10.505 +NULL 17125 2236-07-14 01:54:40.927230276 +NULL 17125 2629-11-15 15:34:52 +NULL 21181 2253-03-12 11:55:48.332 +NULL 21181 2434-02-20 00:46:29.633 +NULL 21436 2526-09-22 
23:44:55 +NULL 21436 2696-05-08 05:19:24.112 +NULL 24870 2752-12-26 12:32:23.03685163 +NULL 2632 2561-12-15 15:42:27 +NULL 26484 1919-03-04 07:32:37.519 +NULL 26484 2953-03-10 02:05:26.508953676 +NULL 2748 2298-06-20 21:01:24 +NULL 2748 2759-02-13 18:04:36.000307355 +NULL 2748 2862-04-20 13:12:39.482805897 +NULL 29407 2385-12-14 06:03:39.597 +NULL 3198 2223-04-14 13:20:49 +NULL 3198 2428-06-13 16:21:33.955 +NULL 3198 2736-12-20 03:59:50.343550301 +NULL 4510 2293-01-17 13:47:41.00001006 +NULL 4510 2777-03-24 03:44:28.000169723 +NULL NULL 2124-05-07 15:01:19.021 +NULL NULL 2933-06-20 11:48:09.000839488 +NULL NULL 2971-08-07 12:02:11.000948152 +NULL NULL NULL +PREHOOK: query: CREATE TABLE fullouter_long_big_1c(key int, b_string string) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1c +POSTHOOK: query: CREATE TABLE fullouter_long_big_1c(key int, b_string string) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1c +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1c.txt' OVERWRITE INTO TABLE fullouter_long_big_1c +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_big_1c +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1c.txt' OVERWRITE INTO TABLE fullouter_long_big_1c +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_big_1c +PREHOOK: query: CREATE TABLE fullouter_long_small_1c(key int, s_decimal decimal(38, 18)) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1c +POSTHOOK: query: CREATE TABLE fullouter_long_small_1c(key int, s_decimal decimal(38, 18)) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1c +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1c.txt' OVERWRITE INTO TABLE fullouter_long_small_1c +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_small_1c +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1c.txt' OVERWRITE INTO TABLE fullouter_long_small_1c +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_small_1c +PREHOOK: query: analyze table fullouter_long_big_1c compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1c +PREHOOK: Output: default@fullouter_long_big_1c +POSTHOOK: query: analyze table fullouter_long_big_1c compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1c +POSTHOOK: Output: default@fullouter_long_big_1c +PREHOOK: query: analyze table fullouter_long_big_1c compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_big_1c +PREHOOK: Output: default@fullouter_long_big_1c +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_big_1c compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_big_1c +POSTHOOK: Output: default@fullouter_long_big_1c +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_long_small_1c compute statistics 
+PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_small_1c +PREHOOK: Output: default@fullouter_long_small_1c +POSTHOOK: query: analyze table fullouter_long_small_1c compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_small_1c +POSTHOOK: Output: default@fullouter_long_small_1c +PREHOOK: query: analyze table fullouter_long_small_1c compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_small_1c +PREHOOK: Output: default@fullouter_long_small_1c +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_small_1c compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_small_1c +POSTHOOK: Output: default@fullouter_long_small_1c +#### A masked pattern was here #### +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 11 Data size: 173 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), b_string (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 11 Data size: 173 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 11 Data size: 173 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string) + Execution mode: llap + LLAP IO: no inputs + Map 2 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 81 Data size: 1703 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), s_decimal (type: decimal(38,18)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 81 Data size: 1703 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 81 Data size: 1703 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: decimal(38,18)) + Execution mode: llap + LLAP IO: no inputs + Reducer 3 + Execution mode: llap + Reduce Operator Tree: + Map Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: int) + 1 KEY.reducesinkkey0 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3 + input vertices: + 0 Map 1 + Statistics: Num rows: 89 Data size: 1873 Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Statistics: Num rows: 89 Data size: 1873 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 
(type: string), _col2 (type: int), _col3 (type: decimal(38,18)) + Reducer 4 + Execution mode: llap + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: int), VALUE._col2 (type: decimal(38,18)) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 89 Data size: 1873 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 89 Data size: 1873 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1c +PREHOOK: Input: default@fullouter_long_small_1c +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1c +POSTHOOK: Input: default@fullouter_long_small_1c +#### A masked pattern was here #### +-1437463633 JU NULL NULL +-1437463633 NULL NULL NULL +-1437463633 SOWDWMS NULL NULL +-1437463633 TKTKGVGFW NULL NULL +-1437463633 YYXPPCH NULL NULL +1725068083 MKSCCE NULL NULL +1928928239 AMKTIWQ NULL NULL +1928928239 NULL NULL NULL +1928928239 NULL NULL NULL +1928928239 VAQHVRI NULL NULL +NULL ABBZ NULL NULL +NULL NULL -1093006502 -69.556658280000000000 +NULL NULL -1197550983 -0.558879692200000000 +NULL NULL -1197550983 0.100000000000000000 +NULL NULL -1197550983 71852.833867441261300000 +NULL NULL -1250662632 -544.554649000000000000 +NULL NULL -1250662632 5454127198.951479000000000000 +NULL NULL -1250662632 93104.000000000000000000 +NULL NULL -1264372462 -6993985240226.000000000000000000 +NULL NULL -1264372462 -899.000000000000000000 +NULL NULL -1264372462 0.883000000000000000 +NULL NULL -1490239076 92253.232096000000000000 +NULL NULL -1681455031 -11105.372477000000000000 +NULL NULL -1681455031 -6.454300000000000000 +NULL NULL -1740848088 -9.157000000000000000 +NULL NULL -1740848088 0.506394259000000000 +NULL NULL -1740848088 901.441000000000000000 +NULL NULL -2048404259 -0.322296044625100000 +NULL NULL -2048404259 3939387044.100000000000000000 +NULL NULL -2123273881 -55.891980000000000000 +NULL NULL -2123273881 3.959000000000000000 +NULL NULL -243940373 -583.258000000000000000 +NULL NULL -243940373 -97176129669.654953000000000000 +NULL NULL -369457052 560.119078830904550000 +NULL NULL -369457052 7.700000000000000000 +NULL NULL -424713789 0.480000000000000000 +NULL NULL -466171792 0.000000000000000000 +NULL NULL -466171792 4227.534400000000000000 +NULL NULL -466171792 69.900000000000000000 +NULL NULL -477147437 6.000000000000000000 +NULL NULL -793950320 -0.100000000000000000 +NULL NULL -793950320 -16.000000000000000000 +NULL NULL -934092157 -7843850349.571300380000000000 +NULL NULL -99948814 -38076694.398100000000000000 +NULL NULL -99948814 -96386.438000000000000000 +NULL NULL 1039864870 0.700000000000000000 +NULL NULL 1039864870 94.040000000000000000 +NULL NULL 1039864870 987601.570000000000000000 +NULL NULL 1091836730 -5017.140000000000000000 
+NULL NULL 1091836730 0.020000000000000000 +NULL NULL 1242586043 -4.000000000000000000 +NULL NULL 1242586043 -749975924224.630000000000000000 +NULL NULL 1242586043 71.148500000000000000 +NULL NULL 1479580778 92077343080.700000000000000000 +NULL NULL 150678276 -8278.000000000000000000 +NULL NULL 150678276 15989394.843600000000000000 +NULL NULL 1519948464 152.000000000000000000 +NULL NULL 1561921421 -5.405000000000000000 +NULL NULL 1561921421 53050.550000000000000000 +NULL NULL 1585021913 -5762331.066971120000000000 +NULL NULL 1585021913 607.227470000000000000 +NULL NULL 1585021913 745222.668089540000000000 +NULL NULL 1719049112 -7888197.000000000000000000 +NULL NULL 1738753776 -99817635066320.241600000000000000 +NULL NULL 1738753776 1525.280459649262000000 +NULL NULL 1755897735 -39.965207000000000000 +NULL NULL 1785750809 47443.115000000000000000 +NULL NULL 1801735854 -1760956929364.267000000000000000 +NULL NULL 1801735854 -438541294.700000000000000000 +NULL NULL 1816559437 -1035.700900000000000000 +NULL NULL 1909136587 -8610.078036935181000000 +NULL NULL 1909136587 181.076815359440000000 +NULL NULL 193709887 -0.566300000000000000 +NULL NULL 193709887 -19889.830000000000000000 +NULL NULL 193709887 0.800000000000000000 +NULL NULL 284554389 5.727146000000000000 +NULL NULL 294598722 -3542.600000000000000000 +NULL NULL 294598722 -9377326244.444000000000000000 +NULL NULL 448130683 -4302.485366846491000000 +NULL NULL 452719211 3020.293893074463600000 +NULL NULL 452719211 83003.437220000000000000 +NULL NULL 466567142 -58810.605860000000000000 +NULL NULL 466567142 -9763217822.129028000000000000 +NULL NULL 466567142 196.578529539858400000 +NULL NULL 560745412 678.250000000000000000 +NULL NULL 698032489 -330457.429262583900000000 +NULL NULL 891262439 -0.040000000000000000 +NULL NULL 90660785 -4564.517185000000000000 +NULL NULL 90660785 12590.288613000000000000 +NULL NULL NULL 1.089120893565337000 +NULL NULL NULL 4.261652270000000000 +NULL NULL NULL 682070836.264960300000000000 +PREHOOK: query: CREATE TABLE fullouter_long_big_1d(key int) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1d +POSTHOOK: query: CREATE TABLE fullouter_long_big_1d(key int) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1d +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1d.txt' OVERWRITE INTO TABLE fullouter_long_big_1d +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_big_1d +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1d.txt' OVERWRITE INTO TABLE fullouter_long_big_1d +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_big_1d +PREHOOK: query: CREATE TABLE fullouter_long_small_1d(key int) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1d +POSTHOOK: query: CREATE TABLE fullouter_long_small_1d(key int) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1d +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1d.txt' OVERWRITE INTO TABLE fullouter_long_small_1d +PREHOOK: type: LOAD +#### A masked pattern was here #### 
+PREHOOK: Output: default@fullouter_long_small_1d +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1d.txt' OVERWRITE INTO TABLE fullouter_long_small_1d +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_small_1d +PREHOOK: query: analyze table fullouter_long_big_1d compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1d +PREHOOK: Output: default@fullouter_long_big_1d +POSTHOOK: query: analyze table fullouter_long_big_1d compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1d +POSTHOOK: Output: default@fullouter_long_big_1d +PREHOOK: query: analyze table fullouter_long_big_1d compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_big_1d +PREHOOK: Output: default@fullouter_long_big_1d +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_big_1d compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_big_1d +POSTHOOK: Output: default@fullouter_long_big_1d +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_long_small_1d compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_small_1d +PREHOOK: Output: default@fullouter_long_small_1d +POSTHOOK: query: analyze table fullouter_long_small_1d compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_small_1d +POSTHOOK: Output: default@fullouter_long_small_1d +PREHOOK: query: analyze table fullouter_long_small_1d compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_small_1d +PREHOOK: Output: default@fullouter_long_small_1d +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_small_1d compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_small_1d +POSTHOOK: Output: default@fullouter_long_small_1d +#### A masked pattern was here #### +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 12 Data size: 106 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 12 Data size: 106 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 12 Data size: 106 Basic stats: COMPLETE Column stats: NONE + Execution mode: llap + LLAP IO: no inputs + Map 2 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 39 Data size: 381 Basic 
stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 39 Data size: 381 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 39 Data size: 381 Basic stats: COMPLETE Column stats: NONE + Execution mode: llap + LLAP IO: no inputs + Reducer 3 + Execution mode: llap + Reduce Operator Tree: + Map Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: int) + 1 KEY.reducesinkkey0 (type: int) + outputColumnNames: _col0, _col1 + input vertices: + 0 Map 1 + Statistics: Num rows: 42 Data size: 419 Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Statistics: Num rows: 42 Data size: 419 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: int) + Reducer 4 + Execution mode: llap + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 42 Data size: 419 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 42 Data size: 419 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1d +PREHOOK: Input: default@fullouter_long_small_1d +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1d +POSTHOOK: Input: default@fullouter_long_small_1d +#### A masked pattern was here #### +-1780951928 NULL +-2038654700 -2038654700 +-670834064 NULL +-702028721 NULL +-702028721 NULL +-702028721 NULL +-814597051 NULL +-814597051 NULL +-814597051 NULL +-814597051 NULL +NULL -1003639073 +NULL -1014271154 +NULL -1036083124 +NULL -1210744742 +NULL -1323620496 +NULL -1379355738 +NULL -1712018127 +NULL -1792852276 +NULL -1912571616 +NULL -497171161 +NULL -683339273 +NULL -707688773 +NULL -747044796 +NULL -894799664 +NULL -932176731 +NULL 103640700 +NULL 1164387380 +NULL 1372592319 +NULL 1431997749 +NULL 1614287784 +NULL 162858059 +NULL 1635405412 +NULL 1685473722 +NULL 1780951928 +NULL 1825107160 +NULL 1831520491 +NULL 1840266070 +NULL 1997943409 +NULL 2119085509 +NULL 246169862 +NULL 260588085 +NULL 41376947 +NULL 436878811 +NULL 533298451 +NULL 670834064 +NULL 699007128 +NULL 699863556 +NULL NULL +NULL NULL +NULL NULL +PREHOOK: query: CREATE TABLE fullouter_multikey_big_1a_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_big_1a_txt +POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1a_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE 
+POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_big_1a_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_multikey_big_1a_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_multikey_big_1a_txt +PREHOOK: query: CREATE TABLE fullouter_multikey_big_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_multikey_big_1a_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_big_1a +POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_multikey_big_1a_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_big_1a +POSTHOOK: Lineage: fullouter_multikey_big_1a.key0 SIMPLE [(fullouter_multikey_big_1a_txt)fullouter_multikey_big_1a_txt.FieldSchema(name:key0, type:smallint, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_big_1a.key1 SIMPLE [(fullouter_multikey_big_1a_txt)fullouter_multikey_big_1a_txt.FieldSchema(name:key1, type:int, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_multikey_big_1a_nonull_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_big_1a_nonull_txt +POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1a_nonull_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_big_1a_nonull_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_nonull_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_multikey_big_1a_nonull_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_nonull_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_multikey_big_1a_nonull_txt +PREHOOK: query: CREATE TABLE fullouter_multikey_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_nonull_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_multikey_big_1a_nonull_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_big_1a_nonull +POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_nonull_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_multikey_big_1a_nonull_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_big_1a_nonull +POSTHOOK: Lineage: fullouter_multikey_big_1a_nonull.key0 SIMPLE [(fullouter_multikey_big_1a_nonull_txt)fullouter_multikey_big_1a_nonull_txt.FieldSchema(name:key0, type:smallint, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_big_1a_nonull.key1 SIMPLE 
[(fullouter_multikey_big_1a_nonull_txt)fullouter_multikey_big_1a_nonull_txt.FieldSchema(name:key1, type:int, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_multikey_small_1a_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_small_1a_txt +POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1a_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_small_1a_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_multikey_small_1a_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_multikey_small_1a_txt +PREHOOK: query: CREATE TABLE fullouter_multikey_small_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_multikey_small_1a_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_small_1a +POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_multikey_small_1a_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_small_1a +POSTHOOK: Lineage: fullouter_multikey_small_1a.key0 SIMPLE [(fullouter_multikey_small_1a_txt)fullouter_multikey_small_1a_txt.FieldSchema(name:key0, type:smallint, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_small_1a.key1 SIMPLE [(fullouter_multikey_small_1a_txt)fullouter_multikey_small_1a_txt.FieldSchema(name:key1, type:int, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_multikey_small_1a_nonull_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_small_1a_nonull_txt +POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1a_nonull_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_small_1a_nonull_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_nonull_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_multikey_small_1a_nonull_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_nonull_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_multikey_small_1a_nonull_txt +PREHOOK: query: CREATE TABLE fullouter_multikey_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_nonull_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_multikey_small_1a_nonull_txt +PREHOOK: Output: database:default +PREHOOK: Output: 
default@fullouter_multikey_small_1a_nonull
+POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_nonull_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@fullouter_multikey_small_1a_nonull_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_multikey_small_1a_nonull
+POSTHOOK: Lineage: fullouter_multikey_small_1a_nonull.key0 SIMPLE [(fullouter_multikey_small_1a_nonull_txt)fullouter_multikey_small_1a_nonull_txt.FieldSchema(name:key0, type:smallint, comment:null), ]
+POSTHOOK: Lineage: fullouter_multikey_small_1a_nonull.key1 SIMPLE [(fullouter_multikey_small_1a_nonull_txt)fullouter_multikey_small_1a_nonull_txt.FieldSchema(name:key1, type:int, comment:null), ]
+PREHOOK: query: analyze table fullouter_multikey_big_1a compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_multikey_big_1a
+PREHOOK: Output: default@fullouter_multikey_big_1a
+POSTHOOK: query: analyze table fullouter_multikey_big_1a compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_multikey_big_1a
+POSTHOOK: Output: default@fullouter_multikey_big_1a
+PREHOOK: query: analyze table fullouter_multikey_big_1a compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_multikey_big_1a
+PREHOOK: Output: default@fullouter_multikey_big_1a
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_multikey_big_1a compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_multikey_big_1a
+POSTHOOK: Output: default@fullouter_multikey_big_1a
+#### A masked pattern was here ####
+PREHOOK: query: analyze table fullouter_multikey_big_1a_nonull compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_multikey_big_1a_nonull
+PREHOOK: Output: default@fullouter_multikey_big_1a_nonull
+POSTHOOK: query: analyze table fullouter_multikey_big_1a_nonull compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_multikey_big_1a_nonull
+POSTHOOK: Output: default@fullouter_multikey_big_1a_nonull
+PREHOOK: query: analyze table fullouter_multikey_big_1a_nonull compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_multikey_big_1a_nonull
+PREHOOK: Output: default@fullouter_multikey_big_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_multikey_big_1a_nonull compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_multikey_big_1a_nonull
+POSTHOOK: Output: default@fullouter_multikey_big_1a_nonull
+#### A masked pattern was here ####
+PREHOOK: query: analyze table fullouter_multikey_small_1a compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_multikey_small_1a
+PREHOOK: Output: default@fullouter_multikey_small_1a
+POSTHOOK: query: analyze table fullouter_multikey_small_1a compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_multikey_small_1a
+POSTHOOK: Output: default@fullouter_multikey_small_1a
+PREHOOK: query: analyze table fullouter_multikey_small_1a compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_multikey_small_1a
+PREHOOK: Output: default@fullouter_multikey_small_1a
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_multikey_small_1a compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_multikey_small_1a
+POSTHOOK: Output: default@fullouter_multikey_small_1a
+#### A masked pattern was here ####
+PREHOOK: query: analyze table fullouter_multikey_small_1a_nonull compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_multikey_small_1a_nonull
+PREHOOK: Output: default@fullouter_multikey_small_1a_nonull
+POSTHOOK: query: analyze table fullouter_multikey_small_1a_nonull compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_multikey_small_1a_nonull
+POSTHOOK: Output: default@fullouter_multikey_small_1a_nonull
+PREHOOK: query: analyze table fullouter_multikey_small_1a_nonull compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_multikey_small_1a_nonull
+PREHOOK: Output: default@fullouter_multikey_small_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_multikey_small_1a_nonull compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_multikey_small_1a_nonull
+POSTHOOK: Output: default@fullouter_multikey_small_1a_nonull
+#### A masked pattern was here ####
+PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
+SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1
+order by b.key0, b.key1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL
+SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1
+order by b.key0, b.key1
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: false
+  enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: b
+                  Statistics: Num rows: 13 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key0 (type: smallint), key1 (type: int)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 13 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: smallint), _col1 (type: int)
+                      sort order: ++
+                      Map-reduce partition columns: _col0 (type: smallint), _col1 (type: int)
+                      Statistics: Num rows: 13 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+            Execution mode: llap
+            LLAP IO: all inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: s
+                  Statistics: Num rows: 92 Data size: 724 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key0 (type: smallint), key1 (type: int)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 92 Data size: 724 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: smallint), _col1 (type: int)
+                      sort order: ++
+                      Map-reduce partition columns: _col0 (type: smallint), _col1 (type: int)
+                      Statistics: Num rows: 92 Data size: 724 Basic stats: COMPLETE Column stats: NONE
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Map Join Operator
+                condition map:
+                     Full Outer Join 0 to 1
+                keys:
+                  0 KEY.reducesinkkey0 (type: smallint), KEY.reducesinkkey1 (type: int)
+                  1 KEY.reducesinkkey0 (type: smallint), KEY.reducesinkkey1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                input vertices:
+                  0 Map 1
+                Statistics: Num rows: 101 Data size: 796 Basic stats: COMPLETE Column stats: NONE
+                DynamicPartitionHashJoin: true
+                Reduce Output Operator
+                  key expressions: _col0 (type: smallint), _col1 (type: int)
+                  sort order: ++
+                  Statistics: Num rows: 101 Data size: 796 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col2 (type: smallint), _col3 (type: int)
+        Reducer 4 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: smallint), KEY.reducesinkkey1 (type: int), VALUE._col0 (type: smallint), VALUE._col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 101 Data size: 796 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 101 Data size: 796 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1
+order by b.key0, b.key1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_multikey_big_1a
+PREHOOK: Input: default@fullouter_multikey_small_1a
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1
+order by b.key0, b.key1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_multikey_big_1a
+POSTHOOK: Input: default@fullouter_multikey_small_1a
+#### A masked pattern was here ####
+-17582 -1730236061 NULL NULL
+-17582 1082230084 NULL NULL
+-17582 267529350 -17582 267529350
+-17582 827141667 NULL NULL
+-17582 9637312 NULL NULL
+-18222 -1969080993 NULL NULL
+-6131 -1969080993 -6131 -1969080993
+1499 371855128 NULL NULL
+22767 -1969080993 NULL NULL
+3556 -1969080993 NULL NULL
+3556 NULL NULL NULL
+NULL 1082230084 NULL NULL
+NULL NULL -11868 -3536499
+NULL NULL -11868 -915441041
+NULL NULL -11868 1052120431
+NULL NULL -11868 1318114822
+NULL NULL -11868 1456809245
+NULL NULL -11868 1658440922
+NULL NULL -11868 930596435
+NULL NULL -11868 97203778
+NULL NULL -12252 1956403781
+NULL NULL -12252 964377504
+NULL NULL -15212 -2055239583
+NULL NULL -17788 -1361776766
+NULL NULL -17788 -738743861
+NULL NULL -17788 -872691214
+NULL NULL -17788 528419995
+NULL NULL -1787 -63842445
+NULL NULL -20125 -1995259010
+NULL NULL -20900 1078466156
+NULL NULL -22311 -2055239583
+NULL NULL -23457 -63842445
+NULL NULL -2407 1078466156
+NULL NULL -24206 -1456409156
+NULL NULL -24206 641361618
+NULL NULL -26894 -63842445
+NULL NULL -28129 -2055239583
+NULL NULL -28137 -63842445
+NULL NULL -28313 -706104224
+NULL NULL -28313 51228026
+NULL NULL -28313 837320573
+NULL NULL -4117 -1386947816
+NULL NULL -5734 1078466156
+NULL NULL -6061 -586336015
+NULL NULL -7386 -1635102480
+NULL NULL -7386 -2112062470
+NULL NULL -7386 100736776
+NULL NULL -980 -270600267
+NULL NULL -980 -333603940
+NULL NULL -980 -465544127
+NULL NULL -980 -801821285
+NULL NULL -980 1310479628
+NULL NULL -980 2009785365
+NULL NULL -980 356970043
+NULL NULL -980 628784462
+NULL NULL -980 712692345
+NULL NULL 11460 1078466156
+NULL NULL 12089 -63842445
+NULL NULL 13672 -63842445
+NULL NULL 14400 -825652334
+NULL NULL 15061 -63842445
+NULL NULL 15404 1078466156
+NULL NULL 16166 931172175
+NULL NULL 16696 -63842445
+NULL NULL 20156 -1618478138
+NULL NULL 20156 1165375499
+NULL NULL 20156 1855042153
+NULL NULL 20156 963883665
+NULL NULL 20969 -1995259010
+NULL NULL 21186 -586336015
+NULL NULL 22934 -1695419330
+NULL NULL 23015 -1893013623
+NULL NULL 23015 -217613200
+NULL NULL 23015 -252525791
+NULL NULL 23015 -276888585
+NULL NULL 23015 -696928205
+NULL NULL 23015 -893234501
+NULL NULL 23015 258882280
+NULL NULL 23015 564751472
+NULL NULL 26738 -2055239583
+NULL NULL 26944 -1995259010
+NULL NULL 30353 -1007182618
+NULL NULL 30353 -1011627089
+NULL NULL 30353 -1507157031
+NULL NULL 30353 105613996
+NULL NULL 30353 1241923267
+NULL NULL 30353 1364268303
+NULL NULL 30353 2044473567
+NULL NULL 31443 -1968665833
+NULL NULL 3412 -1196037018
+NULL NULL 3412 -1249487623
+NULL NULL 3412 -2081156563
+NULL NULL 3412 -2132472060
+NULL NULL 3412 1253976194
+NULL NULL 3890 1411429004
+NULL NULL 4586 -586336015
+NULL NULL 4779 -1995259010
+NULL NULL 4902 1078466156
+NULL NULL 5957 -1995259010
+NULL NULL 8177 -1995259010
+NULL NULL NULL 1082230084
+NULL NULL NULL NULL
+NULL NULL NULL NULL
+PREHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1
+order by b.key0, b.key1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_multikey_big_1a_nonull
+PREHOOK: Input: default@fullouter_multikey_small_1a
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1
+order by b.key0, b.key1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_multikey_big_1a_nonull
+POSTHOOK: Input: default@fullouter_multikey_small_1a
+#### A masked pattern was here ####
+-17582 -1730236061 NULL NULL
+-17582 1082230084 NULL NULL
+-17582 267529350 -17582 267529350
+-17582 827141667 NULL NULL
+-17582 9637312 NULL NULL
+-18222 -1969080993 NULL NULL
+-6131 -1969080993 -6131 -1969080993
+1499 371855128 NULL NULL
+22767 -1969080993 NULL NULL
+3556 -1969080993 NULL NULL
+NULL NULL -11868 -3536499
+NULL NULL -11868 -915441041
+NULL NULL -11868 1052120431
+NULL NULL -11868 1318114822
+NULL NULL -11868 1456809245
+NULL NULL -11868 1658440922
+NULL NULL -11868 930596435
+NULL NULL -11868 97203778
+NULL NULL -12252 1956403781
+NULL NULL -12252 964377504
+NULL NULL -15212 -2055239583
+NULL NULL -17788 -1361776766
+NULL NULL -17788 -738743861
+NULL NULL -17788 -872691214
+NULL NULL -17788 528419995
+NULL NULL -1787 -63842445
+NULL NULL -20125 -1995259010
+NULL NULL -20900 1078466156
+NULL NULL -22311 -2055239583
+NULL NULL -23457 -63842445
+NULL NULL -2407 1078466156
+NULL NULL -24206 -1456409156
+NULL NULL -24206 641361618
+NULL NULL -26894 -63842445
+NULL NULL -28129 -2055239583
+NULL NULL -28137 -63842445
+NULL NULL -28313 -706104224
+NULL NULL -28313 51228026
+NULL NULL -28313 837320573
+NULL NULL -4117 -1386947816
+NULL NULL -5734 1078466156
+NULL NULL -6061 -586336015
+NULL NULL -7386 -1635102480
+NULL NULL -7386 -2112062470
+NULL NULL -7386 100736776
+NULL NULL -980 -270600267
+NULL NULL -980 -333603940
+NULL NULL -980 -465544127
+NULL NULL -980 -801821285
+NULL NULL -980 1310479628
+NULL NULL -980 2009785365
+NULL NULL -980 356970043
+NULL NULL -980 628784462
+NULL NULL -980 712692345
+NULL NULL 11460 1078466156
+NULL NULL 12089 -63842445
+NULL NULL 13672 -63842445
+NULL NULL 14400 -825652334
+NULL NULL 15061 -63842445
+NULL NULL 15404 1078466156
+NULL NULL 16166 931172175
+NULL NULL 16696 -63842445
+NULL NULL 20156 -1618478138
+NULL NULL 20156 1165375499
+NULL NULL 20156 1855042153
+NULL NULL 20156 963883665
+NULL NULL 20969 -1995259010
+NULL NULL 21186 -586336015
+NULL NULL 22934 -1695419330
+NULL NULL 23015 -1893013623
+NULL NULL 23015 -217613200
+NULL NULL 23015 -252525791
+NULL NULL 23015 -276888585
+NULL NULL 23015 -696928205
+NULL NULL 23015 -893234501
+NULL NULL 23015 258882280
+NULL NULL 23015 564751472
+NULL NULL 26738 -2055239583
+NULL NULL 26944 -1995259010
+NULL NULL 30353 -1007182618
+NULL NULL 30353 -1011627089
+NULL NULL 30353 -1507157031
+NULL NULL 30353 105613996
+NULL NULL 30353 1241923267
+NULL NULL 30353 1364268303
+NULL NULL 30353 2044473567
+NULL NULL 31443 -1968665833
+NULL NULL 3412 -1196037018
+NULL NULL 3412 -1249487623
+NULL NULL 3412 -2081156563
+NULL NULL 3412 -2132472060
+NULL NULL 3412 1253976194
+NULL NULL 3890 1411429004
+NULL NULL 4586 -586336015
+NULL NULL 4779 -1995259010
+NULL NULL 4902 1078466156
+NULL NULL 5957 -1995259010
+NULL NULL 8177 -1995259010
+NULL NULL NULL 1082230084
+NULL NULL NULL NULL
+PREHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1
+order by b.key0, b.key1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_multikey_big_1a
+PREHOOK: Input: default@fullouter_multikey_small_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1
+order by b.key0, b.key1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_multikey_big_1a
+POSTHOOK: Input: default@fullouter_multikey_small_1a_nonull
+#### A masked pattern was here ####
+-17582 -1730236061 NULL NULL
+-17582 1082230084 NULL NULL
+-17582 267529350 -17582 267529350
+-17582 827141667 NULL NULL
+-17582 9637312 NULL NULL
+-18222 -1969080993 NULL NULL
+-6131 -1969080993 -6131 -1969080993
+1499 371855128 NULL NULL
+22767 -1969080993 NULL NULL
+3556 -1969080993 NULL NULL
+3556 NULL NULL NULL
+NULL 1082230084 NULL NULL
+NULL NULL -11868 -3536499
+NULL NULL -11868 -915441041
+NULL NULL -11868 1052120431
+NULL NULL -11868 1318114822
+NULL NULL -11868 1456809245
+NULL NULL -11868 1658440922
+NULL NULL -11868 930596435
+NULL NULL -11868 97203778
+NULL NULL -12252 1956403781
+NULL NULL -12252 964377504
+NULL NULL -15212 -2055239583
+NULL NULL -17788 -1361776766
+NULL NULL -17788 -738743861
+NULL NULL -17788 -872691214
+NULL NULL -17788 528419995
+NULL NULL -1787 -63842445
+NULL NULL -20125 -1995259010
+NULL NULL -20900 1078466156
+NULL NULL -22311 -2055239583
+NULL NULL -23457 -63842445
+NULL NULL -2407 1078466156
+NULL NULL -24206 -1456409156
+NULL NULL -24206 641361618
+NULL NULL -26894 -63842445
+NULL NULL -28129 -2055239583
+NULL NULL -28137 -63842445
+NULL NULL -28313 -706104224
+NULL NULL -28313 51228026
+NULL NULL -28313 837320573
+NULL NULL -4117 -1386947816
+NULL NULL -5734 1078466156
+NULL NULL -6061 -586336015
+NULL NULL -7386 -1635102480
+NULL NULL -7386 -2112062470
+NULL NULL -7386 100736776
+NULL NULL -980 -270600267
+NULL NULL -980 -333603940
+NULL NULL -980 -465544127
+NULL NULL -980 -801821285
+NULL NULL -980 1310479628
+NULL NULL -980 2009785365
+NULL NULL -980 356970043
+NULL NULL -980 628784462
+NULL NULL -980 712692345
+NULL NULL 11460 1078466156
+NULL NULL 12089 -63842445
+NULL NULL 13672 -63842445
+NULL NULL 14400 -825652334
+NULL NULL 15061 -63842445
+NULL NULL 15404 1078466156
+NULL NULL 16166 931172175
+NULL NULL 16696 -63842445
+NULL NULL 20156 -1618478138
+NULL NULL 20156 1165375499
+NULL NULL 20156 1855042153
+NULL NULL 20156 963883665
+NULL NULL 20969 -1995259010
+NULL NULL 21186 -586336015
+NULL NULL 22934 -1695419330
+NULL NULL 23015 -1893013623
+NULL NULL 23015 -217613200
+NULL NULL 23015 -252525791
+NULL NULL 23015 -276888585
+NULL NULL 23015 -696928205
+NULL NULL 23015 -893234501
+NULL NULL 23015 258882280
+NULL NULL 23015 564751472
+NULL NULL 26738 -2055239583
+NULL NULL 26944 -1995259010
+NULL NULL 30353 -1007182618
+NULL NULL 30353 -1011627089
+NULL NULL 30353 -1507157031
+NULL NULL 30353 105613996
+NULL NULL 30353 1241923267
+NULL NULL 30353 1364268303
+NULL NULL 30353 2044473567
+NULL NULL 31443 -1968665833
+NULL NULL 3412 -1196037018
+NULL NULL 3412 -1249487623
+NULL NULL 3412 -2081156563
+NULL NULL 3412 -2132472060
+NULL NULL 3412 1253976194
+NULL NULL 3890 1411429004
+NULL NULL 4586 -586336015
+NULL NULL 4779 -1995259010
+NULL NULL 4902 1078466156
+NULL NULL 5957 -1995259010
+NULL NULL 8177 -1995259010
+NULL NULL NULL NULL
+PREHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1
+order by b.key0, b.key1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_multikey_big_1a_nonull
+PREHOOK: Input: default@fullouter_multikey_small_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1
+order by b.key0, b.key1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_multikey_big_1a_nonull
+POSTHOOK: Input: default@fullouter_multikey_small_1a_nonull
+#### A masked pattern was here ####
+-17582 -1730236061 NULL NULL
+-17582 1082230084 NULL NULL
+-17582 267529350 -17582 267529350
+-17582 827141667 NULL NULL
+-17582 9637312 NULL NULL
+-18222 -1969080993 NULL NULL
+-6131 -1969080993 -6131 -1969080993
+1499 371855128 NULL NULL
+22767 -1969080993 NULL NULL
+3556 -1969080993 NULL NULL
+NULL NULL -11868 -3536499
+NULL NULL -11868 -915441041
+NULL NULL -11868 1052120431
+NULL NULL -11868 1318114822
+NULL NULL -11868 1456809245
+NULL NULL -11868 1658440922
+NULL NULL -11868 930596435
+NULL NULL -11868 97203778
+NULL NULL -12252 1956403781
+NULL NULL -12252 964377504
+NULL NULL -15212 -2055239583
+NULL NULL -17788 -1361776766
+NULL NULL -17788 -738743861
+NULL NULL -17788 -872691214
+NULL NULL -17788 528419995
+NULL NULL -1787 -63842445
+NULL NULL -20125 -1995259010
+NULL NULL -20900 1078466156
+NULL NULL -22311 -2055239583
+NULL NULL -23457 -63842445
+NULL NULL -2407 1078466156
+NULL NULL -24206 -1456409156
+NULL NULL -24206 641361618
+NULL NULL -26894 -63842445
+NULL NULL -28129 -2055239583
+NULL NULL -28137 -63842445
+NULL NULL -28313 -706104224
+NULL NULL -28313 51228026
+NULL NULL -28313 837320573
+NULL NULL -4117 -1386947816
+NULL NULL -5734 1078466156
+NULL NULL -6061 -586336015
+NULL NULL -7386 -1635102480
+NULL NULL -7386 -2112062470
+NULL NULL -7386 100736776
+NULL NULL -980 -270600267
+NULL NULL -980 -333603940
+NULL NULL -980 -465544127
+NULL NULL -980 -801821285
+NULL NULL -980 1310479628
+NULL NULL -980 2009785365
+NULL NULL -980 356970043
+NULL NULL -980 628784462
+NULL NULL -980 712692345
+NULL NULL 11460 1078466156
+NULL NULL 12089 -63842445
+NULL NULL 13672 -63842445
+NULL NULL 14400 -825652334
+NULL NULL 15061 -63842445
+NULL NULL 15404 1078466156
+NULL NULL 16166 931172175
+NULL NULL 16696 -63842445
+NULL NULL 20156 -1618478138
+NULL NULL 20156 1165375499
+NULL NULL 20156 1855042153
+NULL NULL 20156 963883665
+NULL NULL 20969 -1995259010
+NULL NULL 21186 -586336015
+NULL NULL 22934 -1695419330
+NULL NULL 23015 -1893013623
+NULL NULL 23015 -217613200
+NULL NULL 23015 -252525791
+NULL NULL 23015 -276888585
+NULL NULL 23015 -696928205
+NULL NULL 23015 -893234501
+NULL NULL 23015 258882280
+NULL NULL 23015 564751472
+NULL NULL 26738 -2055239583
+NULL NULL 26944 -1995259010
+NULL NULL 30353 -1007182618
+NULL NULL 30353 -1011627089
+NULL NULL 30353 -1507157031
+NULL NULL 30353 105613996
+NULL NULL 30353 1241923267
+NULL NULL 30353 1364268303
+NULL NULL 30353 2044473567
+NULL NULL 31443 -1968665833
+NULL NULL 3412 -1196037018
+NULL NULL 3412 -1249487623
+NULL NULL 3412 -2081156563
+NULL NULL 3412 -2132472060
+NULL NULL 3412 1253976194
+NULL NULL 3890 1411429004
+NULL NULL 4586 -586336015
+NULL NULL 4779 -1995259010
+NULL NULL 4902 1078466156
+NULL NULL 5957 -1995259010
+NULL NULL 8177 -1995259010
+PREHOOK: query: CREATE TABLE fullouter_multikey_big_1b_txt(key0 timestamp, key1 smallint, key2 string)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_multikey_big_1b_txt
+POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1b_txt(key0 timestamp, key1 smallint, key2 string)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_multikey_big_1b_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1b_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_multikey_big_1b_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1b_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_multikey_big_1b_txt
+PREHOOK: query: CREATE TABLE fullouter_multikey_big_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1b_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@fullouter_multikey_big_1b_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_multikey_big_1b
+POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1b_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@fullouter_multikey_big_1b_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_multikey_big_1b
+POSTHOOK: Lineage: fullouter_multikey_big_1b.key0 SIMPLE [(fullouter_multikey_big_1b_txt)fullouter_multikey_big_1b_txt.FieldSchema(name:key0, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: fullouter_multikey_big_1b.key1 SIMPLE [(fullouter_multikey_big_1b_txt)fullouter_multikey_big_1b_txt.FieldSchema(name:key1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: fullouter_multikey_big_1b.key2 SIMPLE [(fullouter_multikey_big_1b_txt)fullouter_multikey_big_1b_txt.FieldSchema(name:key2, type:string, comment:null), ]
+PREHOOK: query: CREATE TABLE fullouter_multikey_small_1b_txt(key0 timestamp, key1 smallint, key2 string, s_decimal decimal(38, 18))
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_multikey_small_1b_txt
+POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1b_txt(key0 timestamp, key1 smallint, key2 string, s_decimal decimal(38, 18))
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_multikey_small_1b_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1b_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_multikey_small_1b_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1b_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_multikey_small_1b_txt
+PREHOOK: query: CREATE TABLE fullouter_multikey_small_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1b_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@fullouter_multikey_small_1b_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_multikey_small_1b
+POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1b_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@fullouter_multikey_small_1b_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_multikey_small_1b
+POSTHOOK: Lineage: fullouter_multikey_small_1b.key0 SIMPLE [(fullouter_multikey_small_1b_txt)fullouter_multikey_small_1b_txt.FieldSchema(name:key0, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: fullouter_multikey_small_1b.key1 SIMPLE [(fullouter_multikey_small_1b_txt)fullouter_multikey_small_1b_txt.FieldSchema(name:key1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: fullouter_multikey_small_1b.key2 SIMPLE [(fullouter_multikey_small_1b_txt)fullouter_multikey_small_1b_txt.FieldSchema(name:key2, type:string, comment:null), ]
+POSTHOOK: Lineage: fullouter_multikey_small_1b.s_decimal SIMPLE [(fullouter_multikey_small_1b_txt)fullouter_multikey_small_1b_txt.FieldSchema(name:s_decimal, type:decimal(38,18), comment:null), ]
+PREHOOK: query: analyze table fullouter_multikey_big_1b_txt compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_multikey_big_1b_txt
+PREHOOK: Output: default@fullouter_multikey_big_1b_txt
+POSTHOOK: query: analyze table fullouter_multikey_big_1b_txt compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_multikey_big_1b_txt
+POSTHOOK: Output: default@fullouter_multikey_big_1b_txt
+PREHOOK: query: analyze table fullouter_multikey_big_1b_txt compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_multikey_big_1b_txt
+PREHOOK: Output: default@fullouter_multikey_big_1b_txt
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_multikey_big_1b_txt compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_multikey_big_1b_txt
+POSTHOOK: Output: default@fullouter_multikey_big_1b_txt
+#### A masked pattern was here ####
+PREHOOK: query: analyze table fullouter_multikey_small_1b_txt compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_multikey_small_1b_txt
+PREHOOK: Output: default@fullouter_multikey_small_1b_txt
+POSTHOOK: query: analyze table fullouter_multikey_small_1b_txt compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_multikey_small_1b_txt
+POSTHOOK: Output: default@fullouter_multikey_small_1b_txt
+PREHOOK: query: analyze table fullouter_multikey_small_1b_txt compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_multikey_small_1b_txt
+PREHOOK: Output: default@fullouter_multikey_small_1b_txt
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_multikey_small_1b_txt compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_multikey_small_1b_txt
+POSTHOOK: Output: default@fullouter_multikey_small_1b_txt
+#### A masked pattern was here ####
+PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
+SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2
+order by b.key0, b.key1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL
+SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2
+order by b.key0, b.key1
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: false
+  enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: b
+                  Statistics: Num rows: 17 Data size: 1729 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key0 (type: timestamp), key1 (type: smallint), key2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 17 Data size: 1729 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: timestamp), _col1 (type: smallint), _col2 (type: string)
+                      sort order: +++
+                      Map-reduce partition columns: _col0 (type: timestamp), _col1 (type: smallint), _col2 (type: string)
+                      Statistics: Num rows: 17 Data size: 1729 Basic stats: COMPLETE Column stats: NONE
+            Execution mode: llap
+            LLAP IO: all inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: s
+                  Statistics: Num rows: 118 Data size: 28216 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key0 (type: timestamp), key1 (type: smallint), key2 (type: string), s_decimal (type: decimal(38,18))
+                    outputColumnNames: _col0, _col1, _col2, _col3
+                    Statistics: Num rows: 118 Data size: 28216 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: timestamp), _col1 (type: smallint), _col2 (type: string)
+                      sort order: +++
+                      Map-reduce partition columns: _col0 (type: timestamp), _col1 (type: smallint), _col2 (type: string)
+                      Statistics: Num rows: 118 Data size: 28216 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col3 (type: decimal(38,18))
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Map Join Operator
+                condition map:
+                     Full Outer Join 0 to 1
+                keys:
+                  0 KEY.reducesinkkey0 (type: timestamp), KEY.reducesinkkey1 (type: smallint), KEY.reducesinkkey2 (type: string)
+                  1 KEY.reducesinkkey0 (type: timestamp), KEY.reducesinkkey1 (type: smallint), KEY.reducesinkkey2 (type: string)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+                input vertices:
+                  0 Map 1
+                Statistics: Num rows: 129 Data size: 31037 Basic stats: COMPLETE Column stats: NONE
+                DynamicPartitionHashJoin: true
+                Reduce Output Operator
+                  key expressions: _col0 (type: timestamp), _col1 (type: smallint)
+                  sort order: ++
+                  Statistics: Num rows: 129 Data size: 31037 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col2 (type: string), _col3 (type: timestamp), _col4 (type: smallint), _col5 (type: string), _col6 (type: decimal(38,18))
+        Reducer 4 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: timestamp), KEY.reducesinkkey1 (type: smallint), VALUE._col0 (type: string), VALUE._col1 (type: timestamp), VALUE._col2 (type: smallint), VALUE._col3 (type: string), VALUE._col4 (type: decimal(38,18))
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+                Statistics: Num rows: 129 Data size: 31037 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 129 Data size: 31037 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2
+order by b.key0, b.key1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_multikey_big_1b
+PREHOOK: Input: default@fullouter_multikey_small_1b
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2
+order by b.key0, b.key1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_multikey_big_1b
+POSTHOOK: Input: default@fullouter_multikey_small_1b
+#### A masked pattern was here ####
+2061-12-19 22:10:32.000628309 21635 ANCO NULL NULL NULL NULL
+2082-07-14 04:00:40.695380469 12556 NCYBDW NULL NULL NULL NULL
+2093-04-10 23:36:54.846 1446 GHZVPWFO NULL NULL NULL NULL
+2093-04-10 23:36:54.846 28996 Q NULL NULL NULL NULL
+2093-04-10 23:36:54.846 NULL NULL NULL NULL NULL NULL
+2188-06-04 15:03:14.963259704 9468 AAA 2188-06-04 15:03:14.963259704 9468 AAA 2.754963520000000000
+2299-11-15 16:41:30.401 -31077 NCYBDW NULL NULL NULL NULL
+2306-06-21 11:02:00.143124239 -6909 NCYBDW NULL NULL NULL NULL
+2306-06-21 11:02:00.143124239 1446 NULL NULL NULL NULL NULL
+2608-02-23 23:44:02.546440891 26184 NCYBDW NULL NULL NULL NULL
+2686-05-23 07:46:46.565832918 13212 NCYBDW 2686-05-23 07:46:46.565832918 13212 NCYBDW -917116793.400000000000000000
+2686-05-23 07:46:46.565832918 NULL GHZVPWFO NULL NULL NULL NULL
+2898-10-01 22:27:02.000871113 10361 NCYBDW NULL NULL NULL NULL
+NULL -6909 NULL NULL NULL NULL NULL
+NULL 21635 ANCO NULL NULL NULL NULL
+NULL NULL CCWYD NULL NULL NULL NULL
+NULL NULL NULL 1905-04-20 13:42:25.000469776 2638 KAUUFF 7.000000000000000000
+NULL NULL NULL 1919-06-20 00:16:50.611028595 20223 ZKBC -23.000000000000000000
+NULL NULL NULL 1931-12-04 11:13:47.269597392 23196 HVJCQMTQL -9697532.899400000000000000
+NULL NULL NULL 1941-10-16 02:19:36.000423663 -24459 AO -821445414.457971200000000000
+NULL NULL NULL 1957-02-01 14:00:29.000548421 -16085 ZVEUKC -2312.814900000000000000
+NULL NULL NULL 1957-03-06 09:57:31 -26373 NXLNNSO 2.000000000000000000
+NULL NULL NULL 1980-09-13 19:57:15 NULL M 57650.772300000000000000
+NULL NULL NULL 2018-11-25 22:27:55.84 -12202 VBDBM 7506645.953700000000000000
+NULL NULL NULL 2018-11-25 22:27:55.84 -12202 VBDBM 98790.713907420831000000
+NULL NULL NULL 2018-11-25 22:27:55.84 -22419 LOTLS 342.372604022858400000
+NULL NULL NULL 2038-10-12 09:15:33.000539653 -19598 YKNIAJW -642807895924.660000000000000000
+NULL NULL NULL 2044-05-02 07:00:03.35 -8751 ZSMB -453797242.029791752000000000
+NULL NULL NULL 2071-07-21 20:02:32.000250697 2638 NRUV -66198.351092000000000000
+NULL NULL NULL 2073-03-21 15:32:57.617920888 26425 MPRACIRYW 5.000000000000000000
+NULL NULL NULL 2073-03-21 15:32:57.617920888 26425 MPRACIRYW 726945733.419300000000000000
+NULL NULL NULL 2075-10-25 20:32:40.000792874 NULL NULL 226612651968.360760000000000000
+NULL NULL NULL 2083-06-07 09:35:19.383 -26373 MR -394.086700000000000000
+NULL NULL NULL 2083-06-07 09:35:19.383 -26373 MR 67892053.023760940000000000
+NULL NULL NULL 2086-04-09 00:03:10 20223 THXNJGFFV -85184687349898.892000000000000000
+NULL NULL NULL 2086-04-09 00:03:10 20223 THXNJGFFV 0.439686100000000000
+NULL NULL NULL 2086-04-09 00:03:10 20223 THXNJGFFV 482.538341135921900000
+NULL NULL NULL 2105-01-04 16:27:45 23100 ZSMB -83.232800000000000000
+NULL NULL NULL 2145-10-15 06:58:42.831 2638 NULL -9784.820000000000000000
+NULL NULL NULL 2145-10-15 06:58:42.831 2638 UANGISEXR -5996.306000000000000000
+NULL NULL NULL 2169-04-02 06:30:32 23855 PDVQATOS -1515597428.000000000000000000
+NULL NULL NULL 2169-04-02 06:30:32 23855 PDVQATOS -4016.960800000000000000
+NULL NULL NULL 2201-07-05 17:22:06.084206844 -24459 UBGT 1.506948328200000000
+NULL NULL NULL 2238-05-17 19:27:25.519 20223 KQCM -0.010950000000000000
+NULL NULL NULL 2242-08-04 07:51:46.905 20223 UCYXACQ -0.261490000000000000
+NULL NULL NULL 2242-08-04 07:51:46.905 20223 UCYXACQ 37.728800000000000000
+NULL NULL NULL 2266-09-26 06:27:29.000284762 20223 EDYJJN 14.000000000000000000
+NULL NULL NULL 2301-06-03 17:16:19 15332 ZVEUKC 0.500000000000000000
+NULL NULL NULL 2304-12-15 15:31:16 -13125 JFYW 6.086657000000000000
+NULL NULL NULL 2304-12-15 15:31:16 11101 YJCKKCR -0.200000000000000000
+NULL NULL NULL 2304-12-15 15:31:16 11101 YJCKKCR -0.500000000000000000
+NULL NULL NULL 2304-12-15 15:31:16 11101 YJCKKCR 1279917802.420000000000000000
+NULL NULL NULL 2304-12-15 15:31:16 12587 OPW -4.594895040000000000
+NULL NULL NULL 2304-12-15 15:31:16 1301 T -0.800000000000000000
+NULL NULL NULL 2304-12-15 15:31:16 1301 T 2720.800000000000000000
+NULL NULL NULL 2304-12-15 15:31:16 1301 T 61.302000000000000000
+NULL NULL NULL 2304-12-15 15:31:16 15090 G -4319470286240016.300000000000000000
+NULL NULL NULL 2304-12-15 15:31:16 15090 G 975.000000000000000000
+NULL NULL NULL 2304-12-15 15:31:16 30285 GSJPSIYOU 0.200000000000000000
+NULL NULL NULL 2304-12-15 15:31:16 8650 RLNO -0.435500000000000000
+NULL NULL NULL 2304-12-15 15:31:16 8650 RLNO 0.713517473350000000
+NULL NULL NULL 2309-01-15 12:43:49 22821 ZMY 40.900000000000000000
+NULL NULL NULL 2332-06-14 07:02:42.32 -26373 XFFFDTQ 56845106806308.900000000000000000
+NULL NULL NULL 2333-07-28 09:59:26 23196 RKSK 37872288434740893.500000000000000000
+NULL NULL NULL 2338-02-12 09:30:07 20223 CTH -6154.763054000000000000
+NULL NULL NULL 2340-12-15 05:15:17.133588982 23663 HHTP 33383.800000000000000000
+NULL NULL NULL 2355-09-23 19:52:34.638084141 -19598 H 74179461.880493000000000000
+NULL NULL NULL 2355-09-23 19:52:34.638084141 -19598 H 92.150000000000000000
+NULL NULL NULL 2357-05-08 07:09:09.000482799 6226 ZSMB -32.460000000000000000
+NULL NULL NULL 2357-05-08 07:09:09.000482799 6226 ZSMB -472.000000000000000000
+NULL NULL NULL 2391-01-17 15:28:37.00045143 16160 ZVEUKC 771355639420297.133000000000000000
+NULL NULL NULL 2396-04-06 15:39:02.404013577 29661 ZSMB -5151598.347000000000000000
+NULL NULL NULL 2396-04-06 15:39:02.404013577 29661 ZSMB 0.767183260000000000
+NULL NULL NULL 2409-09-23 10:33:27 2638 XSXR -162.950000000000000000
+NULL NULL NULL 2409-09-23 10:33:27 2638 XSXR -9926693851.000000000000000000
+NULL NULL NULL 2409-09-23 10:33:27 2638 XSXR 0.400000000000000000
+NULL NULL NULL 2410-05-03 13:44:56 2638 PHOR -769088.176482000000000000
+NULL NULL NULL 2410-05-03 13:44:56 2638 PHOR 93262.914526611000000000
+NULL NULL NULL 2461-03-09 09:54:45.000982385 -16454 ZSMB -9575827.553960000000000000
+NULL NULL NULL 2461-03-09 09:54:45.000982385 -16454 ZSMB -991.436050000000000000
+NULL NULL NULL 2461-03-09 09:54:45.000982385 -16454 ZSMB 8694.890000000000000000
+NULL NULL NULL 2462-12-16 23:11:32.633305644 -26373 CB -582687.000000000000000000
+NULL NULL NULL 2462-12-16 23:11:32.633305644 -26373 CB 67.417990000000000000
+NULL NULL NULL 2467-05-11 06:04:13.426693647 23196 EIBSDASR -8.554888380100000000
+NULL NULL NULL 2480-10-02 09:31:37.000770961 -26373 NBN -5875.519725200000000000
+NULL NULL NULL 2512-10-06 03:03:03 -3465 VZQ -49.512190000000000000
+NULL NULL NULL 2512-10-06 03:03:03 -3465 VZQ 0.445800000000000000
+NULL NULL NULL 2512-10-06 03:03:03 13195 CRJ 14.000000000000000000
+NULL NULL NULL 2512-10-06 03:03:03 1560 X -922.695158410700000000
+NULL NULL NULL 2512-10-06 03:03:03 1560 X 761196.522000000000000000
+NULL NULL NULL 2512-10-06 03:03:03 24313 QBHUG -8423.151573236000000000
+NULL NULL NULL 2512-10-06 03:03:03 32099 ARNZ -0.410000000000000000
+NULL NULL NULL 2525-05-12 15:59:35 -24459 SAVRGA 53106747151.863300000000000000
+NULL NULL NULL 2535-03-01 05:04:49.000525883 23663 ALIQKNXHE -0.166569100000000000
+NULL NULL NULL 2629-04-07 01:54:11 -6776 WGGFVFTW 41.774515077866460000
+NULL NULL NULL 2629-04-07 01:54:11 -6776 WGGFVFTW 6.801285170800000000
+NULL NULL NULL 2637-03-12 22:25:46.385 -12923 PPTJPFR 5.400000000000000000
+NULL NULL NULL 2637-03-12 22:25:46.385 -17786 HYEGQ -84.169614329419000000
+NULL NULL NULL 2637-03-12 22:25:46.385 21841 CXTI 7362887891522.378200000000000000
+NULL NULL NULL 2637-03-12 22:25:46.385 21841 CXTI 749563668434009.650000000000000000
+NULL NULL NULL 2668-06-25 07:12:37.000970744 2638 TJE -2.779682700000000000
+NULL NULL NULL 2688-02-06 20:58:42.000947837 20223 PAIY 67661.735000000000000000
+NULL NULL NULL 2743-12-27 05:16:19.000573579 -12914 ZVEUKC -811984611.517849700000000000
+NULL NULL NULL 2759-11-26 22:19:55.410967136 -27454 ZMY 368.000000000000000000
+NULL NULL NULL 2759-11-26 22:19:55.410967136 -27454 ZMY 60.602579700000000000
+NULL NULL NULL 2808-07-09 02:10:11.928498854 -19598 FHFX 0.300000000000000000
+NULL NULL NULL 2829-06-04 08:01:47.836 22771 ZVEUKC 94317.753180000000000000
+NULL NULL NULL 2861-05-27 07:13:01.000848622 -19598 WKPXNLXS 29399.000000000000000000
+NULL NULL NULL 2882-05-20 07:21:25.221299462 23196 U -4244.926206619000000000
+NULL NULL NULL 2882-05-20 07:21:25.221299462 23196 U -9951044.000000000000000000
+NULL NULL NULL 2888-05-08 08:36:55.182302102 5786 ZVEUKC -56082455.033918000000000000
+NULL NULL NULL 2888-05-08 08:36:55.182302102 5786 ZVEUKC 57.621752577880370000
+NULL NULL NULL 2897-08-10 15:21:47.09 23663 XYUVBED 51.732330327300000000
+NULL NULL NULL 2897-08-10 15:21:47.09 23663 XYUVBED 6370.000000000000000000
+NULL NULL NULL 2898-12-18 03:37:17 -24459 MHNBXPBM 14.236693562384810000
+NULL NULL NULL 2913-07-17 15:06:58.041 -10206 NULL -0.200000000000000000
+NULL NULL NULL 2938-12-21 23:35:59.498 29362 ZMY 0.880000000000000000
+NULL NULL NULL 2957-05-07 10:41:46 20223 OWQT -586953.153681000000000000
+NULL NULL NULL 2960-04-12 07:03:42.000366651 20340 CYZYUNSF -96.300000000000000000
+NULL NULL NULL 2960-04-12 07:03:42.000366651 20340 CYZYUNSF 2.157765900000000000
+NULL NULL NULL 2969-01-23 14:08:04.000667259 -18138 VDPN 8924831210.427680190000000000
+NULL NULL NULL 2969-01-23 14:08:04.000667259 -32485 AGEPWWLJF -48431309405.652522000000000000
+NULL NULL NULL 2969-01-23 14:08:04.000667259 -8913 UIMQ -375994644577.315257000000000000
+NULL NULL NULL 2969-01-23 14:08:04.000667259 -8913 UIMQ -81.000000000000000000
+NULL NULL NULL 2969-01-23 14:08:04.000667259 -8913 UIMQ 9.178000000000000000
+NULL NULL NULL 2969-01-23 14:08:04.000667259 14500 WXLTRFQP -23.819800000000000000
+NULL NULL NULL 2969-01-23 14:08:04.000667259 6689 TFGVOGPJF -0.010000000000000000
+NULL NULL NULL 2971-02-14 09:13:19 -16605 BVACIRP -27394351.300000000000000000
+NULL NULL NULL 2971-02-14 09:13:19 -16605 BVACIRP -5.751278023000000000
+NULL NULL NULL NULL -12914 ZVEUKC 221.000000000000000000
+NULL NULL NULL NULL NULL NULL -2.400000000000000000
+NULL NULL NULL NULL NULL NULL -2207.300000000000000000
+NULL NULL NULL NULL NULL NULL NULL
+PREHOOK: query: CREATE TABLE fullouter_string_big_1a_txt(key string)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_string_big_1a_txt
+POSTHOOK: query: CREATE TABLE fullouter_string_big_1a_txt(key string)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_string_big_1a_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_string_big_1a_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_string_big_1a_txt
+PREHOOK: query: CREATE TABLE fullouter_string_big_1a STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@fullouter_string_big_1a_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_string_big_1a
+POSTHOOK: query: CREATE TABLE fullouter_string_big_1a STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@fullouter_string_big_1a_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_string_big_1a
+POSTHOOK: Lineage: fullouter_string_big_1a.key SIMPLE [(fullouter_string_big_1a_txt)fullouter_string_big_1a_txt.FieldSchema(name:key, type:string, comment:null), ]
+PREHOOK: query: CREATE TABLE fullouter_string_big_1a_nonull_txt(key string)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_string_big_1a_nonull_txt
+POSTHOOK: query: CREATE TABLE fullouter_string_big_1a_nonull_txt(key string)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_string_big_1a_nonull_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_nonull_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_string_big_1a_nonull_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_nonull_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_string_big_1a_nonull_txt
+PREHOOK: query: CREATE TABLE fullouter_string_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_nonull_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@fullouter_string_big_1a_nonull_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_string_big_1a_nonull
+POSTHOOK: query: CREATE TABLE fullouter_string_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_nonull_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@fullouter_string_big_1a_nonull_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_string_big_1a_nonull
+POSTHOOK: Lineage: fullouter_string_big_1a_nonull.key SIMPLE [(fullouter_string_big_1a_nonull_txt)fullouter_string_big_1a_nonull_txt.FieldSchema(name:key, type:string, comment:null), ]
+PREHOOK: query: CREATE TABLE fullouter_string_small_1a_txt(key string, s_date date, s_timestamp timestamp)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_string_small_1a_txt
+POSTHOOK: query: CREATE TABLE fullouter_string_small_1a_txt(key string, s_date date, s_timestamp timestamp)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_string_small_1a_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_string_small_1a_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_string_small_1a_txt
+PREHOOK: query: CREATE TABLE fullouter_string_small_1a STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@fullouter_string_small_1a_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_string_small_1a
+POSTHOOK: query: CREATE TABLE fullouter_string_small_1a STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@fullouter_string_small_1a_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_string_small_1a
+POSTHOOK: Lineage: fullouter_string_small_1a.key SIMPLE [(fullouter_string_small_1a_txt)fullouter_string_small_1a_txt.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: fullouter_string_small_1a.s_date SIMPLE [(fullouter_string_small_1a_txt)fullouter_string_small_1a_txt.FieldSchema(name:s_date, type:date, comment:null), ]
+POSTHOOK: Lineage: fullouter_string_small_1a.s_timestamp SIMPLE [(fullouter_string_small_1a_txt)fullouter_string_small_1a_txt.FieldSchema(name:s_timestamp, type:timestamp, comment:null), ]
+PREHOOK: query: CREATE TABLE fullouter_string_small_1a_nonull_txt(key string, s_date date, s_timestamp timestamp)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_string_small_1a_nonull_txt
+POSTHOOK: query: CREATE TABLE fullouter_string_small_1a_nonull_txt(key string, s_date date, s_timestamp timestamp)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_string_small_1a_nonull_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_nonull_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_string_small_1a_nonull_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_nonull_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_string_small_1a_nonull_txt
+PREHOOK: query: CREATE TABLE fullouter_string_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_nonull_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@fullouter_string_small_1a_nonull_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_string_small_1a_nonull
+POSTHOOK: query: CREATE TABLE fullouter_string_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_nonull_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@fullouter_string_small_1a_nonull_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_string_small_1a_nonull
+POSTHOOK: Lineage: fullouter_string_small_1a_nonull.key SIMPLE [(fullouter_string_small_1a_nonull_txt)fullouter_string_small_1a_nonull_txt.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: fullouter_string_small_1a_nonull.s_date SIMPLE [(fullouter_string_small_1a_nonull_txt)fullouter_string_small_1a_nonull_txt.FieldSchema(name:s_date, type:date, comment:null), ]
+POSTHOOK: Lineage: fullouter_string_small_1a_nonull.s_timestamp SIMPLE [(fullouter_string_small_1a_nonull_txt)fullouter_string_small_1a_nonull_txt.FieldSchema(name:s_timestamp, type:timestamp, comment:null), ]
+PREHOOK: query: analyze table fullouter_string_big_1a compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_string_big_1a
+PREHOOK: Output: default@fullouter_string_big_1a
+POSTHOOK: query: analyze table fullouter_string_big_1a compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_string_big_1a
+POSTHOOK: Output: default@fullouter_string_big_1a
+PREHOOK: query: analyze table fullouter_string_big_1a compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_string_big_1a
+PREHOOK: Output: default@fullouter_string_big_1a
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_string_big_1a compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_string_big_1a
+POSTHOOK: Output: default@fullouter_string_big_1a
+#### A masked pattern was here ####
+PREHOOK: query: analyze table fullouter_string_big_1a_nonull compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_string_big_1a_nonull
+PREHOOK: Output: default@fullouter_string_big_1a_nonull
+POSTHOOK: query: analyze table fullouter_string_big_1a_nonull compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_string_big_1a_nonull
+POSTHOOK: Output: default@fullouter_string_big_1a_nonull
+PREHOOK: query: analyze table fullouter_string_big_1a_nonull compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_string_big_1a_nonull
+PREHOOK: Output: default@fullouter_string_big_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_string_big_1a_nonull compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_string_big_1a_nonull
+POSTHOOK: Output: default@fullouter_string_big_1a_nonull
+#### A masked pattern was here ####
+PREHOOK: query: analyze table fullouter_string_small_1a compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_string_small_1a
+PREHOOK: Output: default@fullouter_string_small_1a
+POSTHOOK: query: analyze table fullouter_string_small_1a compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_string_small_1a
+POSTHOOK: Output: default@fullouter_string_small_1a
+PREHOOK: query: analyze table fullouter_string_small_1a compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_string_small_1a
+PREHOOK: Output: default@fullouter_string_small_1a
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_string_small_1a compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_string_small_1a
+POSTHOOK: Output: default@fullouter_string_small_1a
+#### A masked pattern was here ####
+PREHOOK: query: analyze table fullouter_string_small_1a_nonull compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_string_small_1a_nonull
+PREHOOK: Output: default@fullouter_string_small_1a_nonull
+POSTHOOK: query: analyze table fullouter_string_small_1a_nonull compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_string_small_1a_nonull
+POSTHOOK: Output: default@fullouter_string_small_1a_nonull
+PREHOOK: query: analyze table fullouter_string_small_1a_nonull compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_string_small_1a_nonull
+PREHOOK: Output: default@fullouter_string_small_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_string_small_1a_nonull compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_string_small_1a_nonull
+POSTHOOK: Output: default@fullouter_string_small_1a_nonull
+#### A masked pattern was here ####
+PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
+SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key
+order by b.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL
+SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key
+order by b.key
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: false
+  enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: b
+                  Statistics: Num rows: 13 Data size: 1056 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: string)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 13 Data size: 1056 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string)
+                      sort order: +
+                      Map-reduce partition columns: _col0 (type: string)
+                      Statistics: Num rows: 13 Data size: 1056 Basic stats: COMPLETE Column stats: NONE
+            Execution mode: llap
+            LLAP IO: all inputs
+        Map 2 
+            Map Operator Tree:
+                TableScan
+                  alias: s
+                  Statistics: Num rows: 38 Data size: 6606 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: string), s_date (type: date), s_timestamp (type: timestamp)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 38 Data size: 6606 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string)
+                      sort order: +
+                      Map-reduce partition columns: _col0 (type: string)
+                      Statistics: Num rows: 38 Data size: 6606 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col1 (type: date), _col2 (type: timestamp)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Map Join Operator
+                condition map:
+                     Full Outer Join 0 to 1
+                keys:
+                  0 KEY.reducesinkkey0 (type: string)
+                  1 KEY.reducesinkkey0 (type: string)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                input vertices:
+                  0 Map 1
+                Statistics: Num rows: 41 Data size: 7266 Basic stats: COMPLETE Column stats: NONE
+                DynamicPartitionHashJoin: true
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Statistics: Num rows: 41 Data size: 7266 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: string), _col2 (type: date), _col3 (type: timestamp)
+        Reducer 4 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string), VALUE._col1 (type: date), VALUE._col2 (type: timestamp)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 41 Data size: 7266 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 41 Data size: 7266 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key
+order by b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_string_big_1a
+PREHOOK: Input: default@fullouter_string_small_1a
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key
+order by b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_string_big_1a
+POSTHOOK: Input: default@fullouter_string_small_1a
+#### A masked pattern was here ####
+FTWURVH FTWURVH 1976-03-10 2683-11-22 13:07:04.66673556
+MXGDMBD MXGDMBD 1880-11-01 2765-10-06 13:28:17.000688592
+NULL 1985-01-22 2111-01-10 15:44:28
+NULL 2021-02-21 2802-04-21 18:48:18.5933838
+NULL AARNZRVZQ 2000-11-13 2309-06-05 19:54:13
+NULL AARNZRVZQ 2002-10-23 2525-05-12 15:59:35
+NULL ATZJTPECF 1829-10-16 2357-05-08 07:09:09.000482799
+NULL ATZJTPECF 2217-10-22 2808-10-20 16:01:24.558
+NULL BDBMW 2278-04-27 2101-02-21 08:53:34.692
+NULL BEP 2141-02-19 2521-06-09 01:20:07.121
+NULL BEP 2206-08-10 2331-10-09 10:59:51
+NULL CQMTQLI 2031-09-13 1927-02-13 08:39:25.000919094
+NULL CQMTQLI 2090-11-13 2693-03-17 16:19:55.82
+NULL FROPIK 2023-02-28 2467-05-11 06:04:13.426693647
+NULL FROPIK 2124-10-01 2974-07-06 12:05:08.000146048
+NULL FROPIK 2214-02-09 1949-08-18 17:14:38.000703738
+NULL FYW 1807-03-20 2305-08-17 01:32:44
+NULL GOYJHW 1959-04-27 NULL
+NULL GOYJHW 1976-03-06 2805-07-10 10:51:57.00083302
+NULL GOYJHW 1993-04-07 1950-05-04 09:28:22.000114784
+NULL GSJPSIYOU 1948-07-17 2006-09-24 16:01:24.000239251
+NULL IOQIDQBHU 2198-02-08 2073-03-21 15:32:57.617920888
+NULL IWEZJHKE NULL NULL
+NULL KL 1980-09-22 2073-08-25 11:51:10.318
+NULL LOTLS 1957-11-09 2092-06-07 06:42:30.000538454
+NULL LOTLS 2099-08-04 2181-01-25 01:04:25.000030055
+NULL LOTLS 2126-09-16 1977-12-15 15:28:56
+NULL NADANUQMW 2037-10-19 2320-04-26 18:50:25.000426922
+NULL NULL 1865-11-08 2893-04-07 07:36:12
+NULL NULL 1915-02-22 2554-10-27 09:34:30
+NULL NULL 2250-04-22 2548-03-21 08:23:13.133573801
+NULL NULL NULL NULL
+NULL QTSRKSKB 2144-01-13 2627-12-20 03:38:53.000389266
+NULL SDA 2196-04-12 2462-10-26 19:28:12.733
+NULL VNRXWQ 1883-02-06 2287-07-17 16:46:58.287
+NULL VNRXWQ 2276-11-16 2072-08-16 17:45:47.48349887
+NULL WNGFTTY 1843-06-10 2411-01-28 20:03:59
+NULL WNGFTTY 2251-08-16 2649-12-21 18:30:42.498
+NULL ZNOUDCR NULL 1988-04-23 08:40:21
+PXLD NULL NULL NULL
+PXLD NULL NULL NULL
+PXLD NULL NULL NULL
+QNCYBDW NULL NULL NULL
+UA NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+PREHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key
+order by b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_string_big_1a_nonull
+PREHOOK: Input: default@fullouter_string_small_1a
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key
+order by b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_string_big_1a_nonull
+POSTHOOK: Input: default@fullouter_string_small_1a
+#### A masked pattern was here ####
+FTWURVH FTWURVH 1976-03-10 2683-11-22 13:07:04.66673556
+MXGDMBD MXGDMBD 1880-11-01 2765-10-06 13:28:17.000688592
+NULL 1985-01-22 2111-01-10 15:44:28
+NULL 2021-02-21 2802-04-21 18:48:18.5933838
+NULL AARNZRVZQ 2000-11-13 2309-06-05 19:54:13
+NULL AARNZRVZQ 2002-10-23 2525-05-12 15:59:35
+NULL ATZJTPECF 1829-10-16 2357-05-08 07:09:09.000482799
+NULL ATZJTPECF 2217-10-22 2808-10-20 16:01:24.558
+NULL BDBMW 2278-04-27 2101-02-21 08:53:34.692
+NULL BEP 2141-02-19 2521-06-09 01:20:07.121
+NULL BEP 2206-08-10 2331-10-09 10:59:51
+NULL CQMTQLI 2031-09-13 1927-02-13 08:39:25.000919094
+NULL CQMTQLI 2090-11-13 2693-03-17 16:19:55.82
+NULL FROPIK 2023-02-28 2467-05-11 06:04:13.426693647
+NULL FROPIK 2124-10-01 2974-07-06 12:05:08.000146048
+NULL FROPIK 2214-02-09 1949-08-18 17:14:38.000703738
+NULL FYW 1807-03-20 2305-08-17 01:32:44
+NULL GOYJHW 1959-04-27 NULL
+NULL GOYJHW 1976-03-06 2805-07-10 10:51:57.00083302
+NULL GOYJHW 1993-04-07 1950-05-04 09:28:22.000114784
+NULL GSJPSIYOU 1948-07-17 2006-09-24 16:01:24.000239251
+NULL IOQIDQBHU 2198-02-08 2073-03-21 15:32:57.617920888
+NULL IWEZJHKE NULL NULL
+NULL KL 1980-09-22 2073-08-25 11:51:10.318
+NULL LOTLS 1957-11-09 2092-06-07 06:42:30.000538454
+NULL LOTLS 2099-08-04 2181-01-25 01:04:25.000030055
+NULL LOTLS 2126-09-16 1977-12-15 15:28:56
+NULL NADANUQMW 2037-10-19 2320-04-26 18:50:25.000426922
+NULL NULL 1865-11-08 2893-04-07 07:36:12
+NULL NULL 1915-02-22 2554-10-27 09:34:30
+NULL NULL 2250-04-22 2548-03-21 08:23:13.133573801
+NULL QTSRKSKB 2144-01-13 2627-12-20 03:38:53.000389266
+NULL SDA 2196-04-12 2462-10-26 19:28:12.733
+NULL VNRXWQ 1883-02-06 2287-07-17 16:46:58.287
+NULL VNRXWQ 2276-11-16 2072-08-16 17:45:47.48349887
+NULL WNGFTTY 1843-06-10 2411-01-28 20:03:59
+NULL WNGFTTY 2251-08-16 2649-12-21 18:30:42.498
+NULL ZNOUDCR NULL 1988-04-23 08:40:21
+PXLD NULL NULL NULL
+PXLD NULL NULL NULL
+PXLD NULL NULL NULL
+QNCYBDW NULL NULL NULL
+UA NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+PREHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key
+order by b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_string_big_1a
+PREHOOK: Input: default@fullouter_string_small_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key
+order by b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_string_big_1a
+POSTHOOK: Input: default@fullouter_string_small_1a_nonull
+#### A masked pattern was here ####
+FTWURVH FTWURVH 1976-03-10 2683-11-22 13:07:04.66673556
+MXGDMBD MXGDMBD 1880-11-01 2765-10-06 13:28:17.000688592
+NULL 1985-01-22 2111-01-10 15:44:28
+NULL 2021-02-21 2802-04-21 18:48:18.5933838
+NULL AARNZRVZQ 2000-11-13 2309-06-05 19:54:13
+NULL AARNZRVZQ 2002-10-23 2525-05-12 15:59:35
+NULL ATZJTPECF 1829-10-16 2357-05-08 07:09:09.000482799
+NULL ATZJTPECF 2217-10-22 2808-10-20 16:01:24.558
+NULL BDBMW 2278-04-27 2101-02-21 08:53:34.692
+NULL BEP 2141-02-19 2521-06-09 01:20:07.121
+NULL BEP 2206-08-10 2331-10-09 10:59:51
+NULL CQMTQLI 2031-09-13 1927-02-13 08:39:25.000919094
+NULL CQMTQLI 2090-11-13 2693-03-17 16:19:55.82
+NULL FROPIK 2023-02-28 2467-05-11 06:04:13.426693647
+NULL FROPIK 2124-10-01 2974-07-06 12:05:08.000146048
+NULL FROPIK 2214-02-09 1949-08-18 17:14:38.000703738
+NULL FYW 1807-03-20 2305-08-17 01:32:44
+NULL GOYJHW 1959-04-27 NULL
+NULL GOYJHW 1976-03-06 2805-07-10 10:51:57.00083302
+NULL GOYJHW 1993-04-07 1950-05-04 09:28:22.000114784
+NULL GSJPSIYOU 1948-07-17 2006-09-24 16:01:24.000239251
+NULL IOQIDQBHU 2198-02-08 2073-03-21 15:32:57.617920888
+NULL IWEZJHKE NULL NULL
+NULL KL 1980-09-22 2073-08-25 11:51:10.318
+NULL LOTLS 1957-11-09 2092-06-07 06:42:30.000538454
+NULL LOTLS 2099-08-04 2181-01-25 01:04:25.000030055
+NULL LOTLS 2126-09-16 1977-12-15 15:28:56
+NULL NADANUQMW 2037-10-19 2320-04-26 18:50:25.000426922
+NULL NULL NULL NULL
+NULL QTSRKSKB 2144-01-13 2627-12-20 03:38:53.000389266
+NULL SDA 2196-04-12 2462-10-26 19:28:12.733
+NULL VNRXWQ 1883-02-06 2287-07-17 16:46:58.287
+NULL VNRXWQ 2276-11-16 2072-08-16 17:45:47.48349887
+NULL WNGFTTY 1843-06-10 2411-01-28 20:03:59
+NULL WNGFTTY 2251-08-16 2649-12-21 18:30:42.498
+NULL ZNOUDCR NULL 1988-04-23 08:40:21
+PXLD NULL NULL NULL
+PXLD NULL NULL NULL
+PXLD NULL NULL NULL
+QNCYBDW NULL NULL NULL
+UA NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+PREHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key
+order by b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_string_big_1a_nonull
+PREHOOK: Input: default@fullouter_string_small_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key
+order by b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_string_big_1a_nonull
+POSTHOOK: Input: default@fullouter_string_small_1a_nonull
+#### A masked pattern was here ####
+FTWURVH FTWURVH 1976-03-10 2683-11-22 13:07:04.66673556
+MXGDMBD MXGDMBD 1880-11-01 2765-10-06 13:28:17.000688592
+NULL 1985-01-22 2111-01-10 15:44:28
+NULL 2021-02-21 2802-04-21 18:48:18.5933838
+NULL AARNZRVZQ 2000-11-13 2309-06-05 19:54:13
+NULL AARNZRVZQ 2002-10-23 2525-05-12 15:59:35
+NULL ATZJTPECF 1829-10-16 2357-05-08 07:09:09.000482799
+NULL ATZJTPECF 2217-10-22 2808-10-20 16:01:24.558
+NULL BDBMW 2278-04-27 2101-02-21 08:53:34.692
+NULL BEP 2141-02-19 2521-06-09 01:20:07.121
+NULL BEP 2206-08-10 2331-10-09 10:59:51
+NULL CQMTQLI 2031-09-13 1927-02-13 08:39:25.000919094
+NULL CQMTQLI 2090-11-13 2693-03-17 16:19:55.82
+NULL FROPIK 2023-02-28 2467-05-11 06:04:13.426693647
+NULL FROPIK 2124-10-01 2974-07-06 12:05:08.000146048
+NULL FROPIK 2214-02-09 1949-08-18 17:14:38.000703738
+NULL FYW 1807-03-20 2305-08-17 01:32:44
+NULL GOYJHW 1959-04-27 NULL
+NULL GOYJHW 1976-03-06 2805-07-10 10:51:57.00083302
+NULL GOYJHW 1993-04-07 1950-05-04 09:28:22.000114784
+NULL GSJPSIYOU 1948-07-17 2006-09-24 16:01:24.000239251
+NULL IOQIDQBHU 2198-02-08 2073-03-21 15:32:57.617920888
+NULL IWEZJHKE NULL NULL
+NULL KL 1980-09-22 2073-08-25 11:51:10.318
+NULL LOTLS 1957-11-09 2092-06-07 06:42:30.000538454
+NULL LOTLS 2099-08-04 2181-01-25 01:04:25.000030055
+NULL LOTLS 2126-09-16 1977-12-15 15:28:56
+NULL NADANUQMW 2037-10-19 2320-04-26 18:50:25.000426922
+NULL QTSRKSKB 2144-01-13 2627-12-20 03:38:53.000389266
+NULL SDA 2196-04-12 2462-10-26 19:28:12.733
+NULL VNRXWQ 1883-02-06 2287-07-17 16:46:58.287
+NULL VNRXWQ 2276-11-16 2072-08-16 17:45:47.48349887
+NULL WNGFTTY 1843-06-10 2411-01-28 20:03:59
+NULL WNGFTTY 2251-08-16 2649-12-21 18:30:42.498
+NULL ZNOUDCR NULL 1988-04-23 08:40:21
+PXLD NULL NULL NULL
+PXLD NULL NULL NULL
+PXLD NULL NULL NULL
+QNCYBDW NULL NULL NULL
+UA NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
diff --git ql/src/test/results/clientpositive/llap/groupby_resolution.q.out ql/src/test/results/clientpositive/llap/groupby_resolution.q.out
index 11bb452135..422d3c90d1 100644 --- ql/src/test/results/clientpositive/llap/groupby_resolution.q.out +++ ql/src/test/results/clientpositive/llap/groupby_resolution.q.out @@ -712,7 +712,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: 0 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/llap/insert_into_with_schema.q.out ql/src/test/results/clientpositive/llap/insert_into_with_schema.q.out index 95de940c70..13545d8f91 100644 --- ql/src/test/results/clientpositive/llap/insert_into_with_schema.q.out +++ ql/src/test/results/clientpositive/llap/insert_into_with_schema.q.out @@ -144,9 +144,9 @@ POSTHOOK: query: select * from target1 order by x,y,z POSTHOOK: type: QUERY POSTHOOK: Input: x314@target1 #### A masked pattern was here #### -NULL 1 2 1 2 NULL 2 NULL 1 +NULL 1 2 PREHOOK: query: select * from target2 order by x,y,z PREHOOK: type: QUERY PREHOOK: Input: x314@target2 @@ -155,8 +155,8 @@ POSTHOOK: query: select * from target2 order by x,y,z POSTHOOK: type: QUERY POSTHOOK: Input: x314@target2 #### A masked pattern was here #### -NULL 1 2 2 NULL 1 +NULL 1 2 PREHOOK: query: create table source2(s1 int, s2 int) PREHOOK: type: CREATETABLE PREHOOK: Output: database:x314 diff --git ql/src/test/results/clientpositive/llap/insert_values_non_partitioned.q.out ql/src/test/results/clientpositive/llap/insert_values_non_partitioned.q.out index 38193510c4..513f7e533b 100644 --- ql/src/test/results/clientpositive/llap/insert_values_non_partitioned.q.out +++ ql/src/test/results/clientpositive/llap/insert_values_non_partitioned.q.out @@ -65,6 +65,6 @@ POSTHOOK: query: select * from acid_ivnp order by ti POSTHOOK: type: QUERY POSTHOOK: Input: default@acid_ivnp #### A masked pattern was here #### -NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL 1 257 65537 4294967297 3.14 3.141592654 109.23 2014-08-25 17:21:30 2014-08-25 true mary had a little lamb ring around the rosie red 3 25 6553 NULL 0.14 1923.141592654 1.23 2014-08-24 17:21:30 2014-08-26 false its fleece was white as snow a pocket full of posies blue +NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL diff --git ql/src/test/results/clientpositive/llap/join46.q.out ql/src/test/results/clientpositive/llap/join46.q.out index 07c4a6234e..95d3611de1 100644 --- ql/src/test/results/clientpositive/llap/join46.q.out +++ ql/src/test/results/clientpositive/llap/join46.q.out @@ -1633,7 +1633,7 @@ STAGE PLANS: Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 1 @@ -1746,7 +1746,7 @@ STAGE PLANS: Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 1 @@ -1857,7 +1857,7 @@ STAGE PLANS: Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 1 @@ -1970,7 +1970,7 @@ STAGE PLANS: Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col1 (type: int) 1 _col1 (type: int) @@ -2148,7 +2148,7 @@ STAGE PLANS: Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 1 diff --git ql/src/test/results/clientpositive/llap/join_max_hashtable.q.out ql/src/test/results/clientpositive/llap/join_max_hashtable.q.out index c0c9f95889..c3b1eb7b80 100644 --- ql/src/test/results/clientpositive/llap/join_max_hashtable.q.out +++ 
ql/src/test/results/clientpositive/llap/join_max_hashtable.q.out @@ -230,6 +230,7 @@ STAGE PLANS: input vertices: 1 Map 3 Statistics: Num rows: 791 Data size: 140798 Basic stats: COMPLETE Column stats: COMPLETE + DynamicPartitionHashJoin: true HybridGraceHashJoin: true File Output Operator compressed: false @@ -318,6 +319,7 @@ STAGE PLANS: input vertices: 1 Map 3 Statistics: Num rows: 791 Data size: 140798 Basic stats: COMPLETE Column stats: COMPLETE + DynamicPartitionHashJoin: true HybridGraceHashJoin: true File Output Operator compressed: false @@ -407,6 +409,7 @@ STAGE PLANS: input vertices: 1 Map 3 Statistics: Num rows: 791 Data size: 140798 Basic stats: COMPLETE Column stats: COMPLETE + DynamicPartitionHashJoin: true HybridGraceHashJoin: true File Output Operator compressed: false @@ -495,6 +498,7 @@ STAGE PLANS: input vertices: 1 Map 3 Statistics: Num rows: 791 Data size: 140798 Basic stats: COMPLETE Column stats: COMPLETE + DynamicPartitionHashJoin: true HybridGraceHashJoin: true File Output Operator compressed: false diff --git ql/src/test/results/clientpositive/llap/limit_pushdown.q.out ql/src/test/results/clientpositive/llap/limit_pushdown.q.out index 1d19fc0a29..8d1aaee0ad 100644 --- ql/src/test/results/clientpositive/llap/limit_pushdown.q.out +++ ql/src/test/results/clientpositive/llap/limit_pushdown.q.out @@ -456,6 +456,7 @@ POSTHOOK: query: select distinct(cdouble) as dis from alltypesorc order by dis l POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### +-16243.0 -16269.0 -16274.0 -16277.0 @@ -475,7 +476,6 @@ POSTHOOK: Input: default@alltypesorc -16372.0 -16373.0 -16379.0 -NULL PREHOOK: query: explain select ctinyint, count(distinct(cdouble)) from alltypesorc group by ctinyint order by ctinyint limit 20 PREHOOK: type: QUERY @@ -554,6 +554,7 @@ POSTHOOK: query: select ctinyint, count(distinct(cdouble)) from alltypesorc grou POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### +-45 24 -46 24 -47 22 -48 29 @@ -573,7 +574,6 @@ POSTHOOK: Input: default@alltypesorc -62 27 -63 19 -64 24 -NULL 2932 PREHOOK: query: explain select ctinyint, count(cdouble) from (select ctinyint, cdouble from alltypesorc group by ctinyint, cdouble) t1 group by ctinyint order by ctinyint limit 20 PREHOOK: type: QUERY @@ -652,6 +652,7 @@ POSTHOOK: query: select ctinyint, count(cdouble) from (select ctinyint, cdouble POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### +-45 24 -46 24 -47 22 -48 29 @@ -671,7 +672,6 @@ POSTHOOK: Input: default@alltypesorc -62 27 -63 19 -64 24 -NULL 2932 PREHOOK: query: explain select ctinyint, count(distinct(cstring1)), count(distinct(cstring2)) from alltypesorc group by ctinyint order by ctinyint limit 20 PREHOOK: type: QUERY @@ -747,6 +747,7 @@ POSTHOOK: query: select ctinyint, count(distinct(cstring1)), count(distinct(cstr POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### +-45 3 24 -46 3 19 -47 3 23 -48 3 27 @@ -766,7 +767,6 @@ POSTHOOK: Input: default@alltypesorc -62 3 23 -63 3 16 -64 3 13 -NULL 3065 3 PREHOOK: query: explain select key,value from src order by key limit 0 PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/llap/limit_pushdown3.q.out ql/src/test/results/clientpositive/llap/limit_pushdown3.q.out index 1027bfe85a..1f9d6928c0 100644 --- ql/src/test/results/clientpositive/llap/limit_pushdown3.q.out +++ ql/src/test/results/clientpositive/llap/limit_pushdown3.q.out @@ -497,7 
+497,6 @@ POSTHOOK: query: select distinct(cdouble) as dis from alltypesorc order by dis l POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL -16379.0 -16373.0 -16372.0 @@ -517,6 +516,7 @@ NULL -16277.0 -16274.0 -16269.0 +-16243.0 PREHOOK: query: explain select ctinyint, count(distinct(cdouble)) from alltypesorc group by ctinyint order by ctinyint limit 20 PREHOOK: type: QUERY @@ -609,7 +609,6 @@ POSTHOOK: query: select ctinyint, count(distinct(cdouble)) from alltypesorc grou POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL 2932 -64 24 -63 19 -62 27 @@ -629,6 +628,7 @@ NULL 2932 -48 29 -47 22 -46 24 +-45 24 PREHOOK: query: explain select ctinyint, count(cdouble) from (select ctinyint, cdouble from alltypesorc group by ctinyint, cdouble) t1 group by ctinyint order by ctinyint limit 20 PREHOOK: type: QUERY @@ -721,7 +721,6 @@ POSTHOOK: query: select ctinyint, count(cdouble) from (select ctinyint, cdouble POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL 2932 -64 24 -63 19 -62 27 @@ -741,6 +740,7 @@ NULL 2932 -48 29 -47 22 -46 24 +-45 24 PREHOOK: query: explain select ctinyint, count(distinct(cstring1)), count(distinct(cstring2)) from alltypesorc group by ctinyint order by ctinyint limit 20 PREHOOK: type: QUERY @@ -830,7 +830,6 @@ POSTHOOK: query: select ctinyint, count(distinct(cstring1)), count(distinct(cstr POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL 3065 3 -64 3 13 -63 3 16 -62 3 23 @@ -850,6 +849,7 @@ NULL 3065 3 -48 3 27 -47 3 23 -46 3 19 +-45 3 24 PREHOOK: query: explain select key,value from src order by key limit 0 PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/llap/lineage2.q.out ql/src/test/results/clientpositive/llap/lineage2.q.out index 76f0c9de30..f56b100046 100644 --- ql/src/test/results/clientpositive/llap/lineage2.q.out +++ ql/src/test/results/clientpositive/llap/lineage2.q.out @@ -634,7 +634,7 @@ having count(a.c2) > 0 PREHOOK: type: QUERY PREHOOK: Input: default@dest_l2 #### A masked pattern was here #### -{"version":"1.0","engine":"tez","database":"default","hash":"4e60ca1e72d985639b2027021a199297","queryText":"select sum(a.c1) over (partition by a.c1 order by a.id)\nfrom dest_l2 a\nwhere a.c2 != 10\ngroup by a.c1, a.c2, a.id\nhaving count(a.c2) > 0","edges":[{"sources":[1,2,3],"targets":[0],"expression":"(tok_function sum (. (tok_table_or_col $hdt$_0) c1) (tok_windowspec (tok_partitioningspec (tok_distributeby (. (tok_table_or_col $hdt$_0) c1)) (tok_orderby (tok_tabsortcolnameasc (tok_nulls_first (. 
(tok_table_or_col $hdt$_0) id))))) (tok_windowvalues (preceding 2147483647) current)))","edgeType":"PROJECTION"},{"sources":[2],"targets":[0],"expression":"(a.c2 <> 10)","edgeType":"PREDICATE"},{"sources":[2],"targets":[0],"expression":"(count(default.dest_l2.c2) > 0L)","edgeType":"PREDICATE"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"sum_window_0"},{"id":1,"vertexType":"COLUMN","vertexId":"default.dest_l2.c1"},{"id":2,"vertexType":"COLUMN","vertexId":"default.dest_l2.c2"},{"id":3,"vertexType":"COLUMN","vertexId":"default.dest_l2.id"}]} +{"version":"1.0","engine":"tez","database":"default","hash":"4e60ca1e72d985639b2027021a199297","queryText":"select sum(a.c1) over (partition by a.c1 order by a.id)\nfrom dest_l2 a\nwhere a.c2 != 10\ngroup by a.c1, a.c2, a.id\nhaving count(a.c2) > 0","edges":[{"sources":[1,2,3],"targets":[0],"expression":"(tok_function sum (. (tok_table_or_col $hdt$_0) c1) (tok_windowspec (tok_partitioningspec (tok_distributeby (. (tok_table_or_col $hdt$_0) c1)) (tok_orderby (tok_tabsortcolnameasc (tok_nulls_last (. (tok_table_or_col $hdt$_0) id))))) (tok_windowvalues (preceding 2147483647) current)))","edgeType":"PROJECTION"},{"sources":[2],"targets":[0],"expression":"(a.c2 <> 10)","edgeType":"PREDICATE"},{"sources":[2],"targets":[0],"expression":"(count(default.dest_l2.c2) > 0L)","edgeType":"PREDICATE"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"sum_window_0"},{"id":1,"vertexType":"COLUMN","vertexId":"default.dest_l2.c1"},{"id":2,"vertexType":"COLUMN","vertexId":"default.dest_l2.c2"},{"id":3,"vertexType":"COLUMN","vertexId":"default.dest_l2.id"}]} 1 PREHOOK: query: select sum(a.c1), count(b.c1), b.c2, b.c3 from dest_l2 a join dest_l3 b on (a.id = b.id) diff --git ql/src/test/results/clientpositive/llap/lineage3.q.out ql/src/test/results/clientpositive/llap/lineage3.q.out index e05d4527b3..cf38816127 100644 --- ql/src/test/results/clientpositive/llap/lineage3.q.out +++ ql/src/test/results/clientpositive/llap/lineage3.q.out @@ -67,7 +67,7 @@ where cint > 10 and cint < 10000 limit 10 PREHOOK: type: QUERY PREHOOK: Input: default@alltypesorc #### A masked pattern was here #### -{"version":"1.0","engine":"tez","database":"default","hash":"af879e003bd60eb1f8ff064bd3f362ac","queryText":"select cint, rank() over(order by cint) from alltypesorc\nwhere cint > 10 and cint < 10000 limit 10","edges":[{"sources":[2],"targets":[0],"edgeType":"PROJECTION"},{"sources":[3,4,2,5,6,7,8,9,10,11,12,13],"targets":[1],"expression":"(tok_function rank (tok_windowspec (tok_partitioningspec (tok_distributeby 0) (tok_orderby (tok_tabsortcolnameasc (tok_nulls_first (. 
(tok_table_or_col alltypesorc) cint))))) (tok_windowrange (preceding 2147483647) (following 2147483647))))","edgeType":"PROJECTION"},{"sources":[2],"targets":[0,1],"expression":"((alltypesorc.cint > 10) and (alltypesorc.cint < 10000))","edgeType":"PREDICATE"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"cint"},{"id":1,"vertexType":"COLUMN","vertexId":"rank_window_0"},{"id":2,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cint"},{"id":3,"vertexType":"COLUMN","vertexId":"default.alltypesorc.ctinyint"},{"id":4,"vertexType":"COLUMN","vertexId":"default.alltypesorc.csmallint"},{"id":5,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cbigint"},{"id":6,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cfloat"},{"id":7,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cdouble"},{"id":8,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cstring1"},{"id":9,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cstring2"},{"id":10,"vertexType":"COLUMN","vertexId":"default.alltypesorc.ctimestamp1"},{"id":11,"vertexType":"COLUMN","vertexId":"default.alltypesorc.ctimestamp2"},{"id":12,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cboolean1"},{"id":13,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cboolean2"}]} +{"version":"1.0","engine":"tez","database":"default","hash":"af879e003bd60eb1f8ff064bd3f362ac","queryText":"select cint, rank() over(order by cint) from alltypesorc\nwhere cint > 10 and cint < 10000 limit 10","edges":[{"sources":[2],"targets":[0],"edgeType":"PROJECTION"},{"sources":[3,4,2,5,6,7,8,9,10,11,12,13],"targets":[1],"expression":"(tok_function rank (tok_windowspec (tok_partitioningspec (tok_distributeby 0) (tok_orderby (tok_tabsortcolnameasc (tok_nulls_last (. (tok_table_or_col alltypesorc) cint))))) (tok_windowrange (preceding 2147483647) (following 2147483647))))","edgeType":"PROJECTION"},{"sources":[2],"targets":[0,1],"expression":"((alltypesorc.cint > 10) and (alltypesorc.cint < 10000))","edgeType":"PREDICATE"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"cint"},{"id":1,"vertexType":"COLUMN","vertexId":"rank_window_0"},{"id":2,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cint"},{"id":3,"vertexType":"COLUMN","vertexId":"default.alltypesorc.ctinyint"},{"id":4,"vertexType":"COLUMN","vertexId":"default.alltypesorc.csmallint"},{"id":5,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cbigint"},{"id":6,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cfloat"},{"id":7,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cdouble"},{"id":8,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cstring1"},{"id":9,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cstring2"},{"id":10,"vertexType":"COLUMN","vertexId":"default.alltypesorc.ctimestamp1"},{"id":11,"vertexType":"COLUMN","vertexId":"default.alltypesorc.ctimestamp2"},{"id":12,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cboolean1"},{"id":13,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cboolean2"}]} 762 1 762 1 762 1 @@ -237,8 +237,8 @@ PREHOOK: Input: default@alltypesorc PREHOOK: Input: default@dest_v1 #### A masked pattern was here #### {"version":"1.0","engine":"tez","database":"default","hash":"3d35b5bc2418de2cc033311606ac03cf","queryText":"select * from dest_v1 order by ctinyint, cint limit 2","edges":[{"sources":[2],"targets":[0],"edgeType":"PROJECTION"},{"sources":[3],"targets":[1],"edgeType":"PROJECTION"},{"sources":[2],"targets":[0,1],"expression":"alltypesorc.ctinyint is not 
null","edgeType":"PREDICATE"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"dest_v1.ctinyint"},{"id":1,"vertexType":"COLUMN","vertexId":"dest_v1.cint"},{"id":2,"vertexType":"COLUMN","vertexId":"default.alltypesorc.ctinyint"},{"id":3,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cint"}]} --64 NULL --64 NULL +-64 253665376 +-64 253665376 PREHOOK: query: alter view dest_v1 as select ctinyint from alltypesorc PREHOOK: type: CREATEVIEW PREHOOK: Input: default@alltypesorc @@ -311,13 +311,13 @@ PREHOOK: type: CREATEVIEW PREHOOK: Input: default@alltypesorc PREHOOK: Output: database:default PREHOOK: Output: default@dest_v3 -{"version":"1.0","engine":"tez","database":"default","hash":"81bb549360513aeae39a3bd971405be3","queryText":"alter view dest_v3 as\n select * from (\n select sum(a.ctinyint) over (partition by a.csmallint order by a.csmallint) a,\n count(b.cstring1) x, b.cboolean1\n from alltypesorc a join alltypesorc b on (a.cint = b.cint)\n where a.cboolean2 = true and b.cfloat > 0\n group by a.ctinyint, a.csmallint, b.cboolean1\n having count(a.cint) > 10\n order by a, x, b.cboolean1 limit 10) t_n20","edges":[{"sources":[3,4,5,6,7],"targets":[0],"expression":"(tok_function sum (. (tok_table_or_col a) ctinyint) (tok_windowspec (tok_partitioningspec (tok_distributeby (. (tok_table_or_col a) csmallint)) (tok_orderby (tok_tabsortcolnameasc (tok_nulls_first (. (tok_table_or_col a) csmallint)))))))","edgeType":"PROJECTION"},{"sources":[6],"targets":[1],"expression":"count(default.alltypesorc.cstring1)","edgeType":"PROJECTION"},{"sources":[5],"targets":[2],"edgeType":"PROJECTION"},{"sources":[7],"targets":[0,1,2],"expression":"(a.cint = b.cint)","edgeType":"PREDICATE"},{"sources":[8,9],"targets":[0,1,2],"expression":"((a.cboolean2 = true) and (b.cfloat > 0.0))","edgeType":"PREDICATE"},{"sources":[7],"targets":[0,1,2],"expression":"(count(default.alltypesorc.cint) > 10L)","edgeType":"PREDICATE"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"default.dest_v3.a"},{"id":1,"vertexType":"COLUMN","vertexId":"default.dest_v3.x"},{"id":2,"vertexType":"COLUMN","vertexId":"default.dest_v3.cboolean1"},{"id":3,"vertexType":"COLUMN","vertexId":"default.alltypesorc.ctinyint"},{"id":4,"vertexType":"COLUMN","vertexId":"default.alltypesorc.csmallint"},{"id":5,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cboolean1"},{"id":6,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cstring1"},{"id":7,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cint"},{"id":8,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cboolean2"},{"id":9,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cfloat"}]} +{"version":"1.0","engine":"tez","database":"default","hash":"81bb549360513aeae39a3bd971405be3","queryText":"alter view dest_v3 as\n select * from (\n select sum(a.ctinyint) over (partition by a.csmallint order by a.csmallint) a,\n count(b.cstring1) x, b.cboolean1\n from alltypesorc a join alltypesorc b on (a.cint = b.cint)\n where a.cboolean2 = true and b.cfloat > 0\n group by a.ctinyint, a.csmallint, b.cboolean1\n having count(a.cint) > 10\n order by a, x, b.cboolean1 limit 10) t_n20","edges":[{"sources":[3,4,5,6,7],"targets":[0],"expression":"(tok_function sum (. (tok_table_or_col a) ctinyint) (tok_windowspec (tok_partitioningspec (tok_distributeby (. (tok_table_or_col a) csmallint)) (tok_orderby (tok_tabsortcolnameasc (tok_nulls_last (. 
(tok_table_or_col a) csmallint)))))))","edgeType":"PROJECTION"},{"sources":[6],"targets":[1],"expression":"count(default.alltypesorc.cstring1)","edgeType":"PROJECTION"},{"sources":[5],"targets":[2],"edgeType":"PROJECTION"},{"sources":[7],"targets":[0,1,2],"expression":"(a.cint = b.cint)","edgeType":"PREDICATE"},{"sources":[8,9],"targets":[0,1,2],"expression":"((a.cboolean2 = true) and (b.cfloat > 0.0))","edgeType":"PREDICATE"},{"sources":[7],"targets":[0,1,2],"expression":"(count(default.alltypesorc.cint) > 10L)","edgeType":"PREDICATE"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"default.dest_v3.a"},{"id":1,"vertexType":"COLUMN","vertexId":"default.dest_v3.x"},{"id":2,"vertexType":"COLUMN","vertexId":"default.dest_v3.cboolean1"},{"id":3,"vertexType":"COLUMN","vertexId":"default.alltypesorc.ctinyint"},{"id":4,"vertexType":"COLUMN","vertexId":"default.alltypesorc.csmallint"},{"id":5,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cboolean1"},{"id":6,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cstring1"},{"id":7,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cint"},{"id":8,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cboolean2"},{"id":9,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cfloat"}]} PREHOOK: query: select * from dest_v3 limit 2 PREHOOK: type: QUERY PREHOOK: Input: default@alltypesorc PREHOOK: Input: default@dest_v3 #### A masked pattern was here #### -{"version":"1.0","engine":"tez","database":"default","hash":"fd4e0dd59f42b53fc07125817451df49","queryText":"select * from dest_v3 limit 2","edges":[{"sources":[3,4,5,6,7],"targets":[0],"expression":"(tok_function sum (. (tok_table_or_col $hdt$_0) ctinyint) (tok_windowspec (tok_partitioningspec (tok_distributeby (. (tok_table_or_col $hdt$_0) csmallint)) (tok_orderby (tok_tabsortcolnameasc (tok_nulls_first (. (tok_table_or_col $hdt$_0) csmallint))))) (tok_windowvalues (preceding 2147483647) current)))","edgeType":"PROJECTION"},{"sources":[6],"targets":[1],"expression":"count(default.alltypesorc.cstring1)","edgeType":"PROJECTION"},{"sources":[5],"targets":[2],"edgeType":"PROJECTION"},{"sources":[8,7],"targets":[0,1,2],"expression":"(a.cboolean2 and a.cint is not null)","edgeType":"PREDICATE"},{"sources":[7],"targets":[0,1,2],"expression":"(a.cint = b.cint)","edgeType":"PREDICATE"},{"sources":[9,7],"targets":[0,1,2],"expression":"((b.cfloat > 0) and b.cint is not null)","edgeType":"PREDICATE"},{"sources":[7],"targets":[0,1,2],"expression":"(count(default.alltypesorc.cint) > 10L)","edgeType":"PREDICATE"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"dest_v3.a"},{"id":1,"vertexType":"COLUMN","vertexId":"dest_v3.x"},{"id":2,"vertexType":"COLUMN","vertexId":"dest_v3.cboolean1"},{"id":3,"vertexType":"COLUMN","vertexId":"default.alltypesorc.ctinyint"},{"id":4,"vertexType":"COLUMN","vertexId":"default.alltypesorc.csmallint"},{"id":5,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cboolean1"},{"id":6,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cstring1"},{"id":7,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cint"},{"id":8,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cboolean2"},{"id":9,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cfloat"}]} +{"version":"1.0","engine":"tez","database":"default","hash":"fd4e0dd59f42b53fc07125817451df49","queryText":"select * from dest_v3 limit 2","edges":[{"sources":[3,4,5,6,7],"targets":[0],"expression":"(tok_function sum (. 
(tok_table_or_col $hdt$_0) ctinyint) (tok_windowspec (tok_partitioningspec (tok_distributeby (. (tok_table_or_col $hdt$_0) csmallint)) (tok_orderby (tok_tabsortcolnameasc (tok_nulls_last (. (tok_table_or_col $hdt$_0) csmallint))))) (tok_windowvalues (preceding 2147483647) current)))","edgeType":"PROJECTION"},{"sources":[6],"targets":[1],"expression":"count(default.alltypesorc.cstring1)","edgeType":"PROJECTION"},{"sources":[5],"targets":[2],"edgeType":"PROJECTION"},{"sources":[8,7],"targets":[0,1,2],"expression":"(a.cboolean2 and a.cint is not null)","edgeType":"PREDICATE"},{"sources":[7],"targets":[0,1,2],"expression":"(a.cint = b.cint)","edgeType":"PREDICATE"},{"sources":[9,7],"targets":[0,1,2],"expression":"((b.cfloat > 0) and b.cint is not null)","edgeType":"PREDICATE"},{"sources":[7],"targets":[0,1,2],"expression":"(count(default.alltypesorc.cint) > 10L)","edgeType":"PREDICATE"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"dest_v3.a"},{"id":1,"vertexType":"COLUMN","vertexId":"dest_v3.x"},{"id":2,"vertexType":"COLUMN","vertexId":"dest_v3.cboolean1"},{"id":3,"vertexType":"COLUMN","vertexId":"default.alltypesorc.ctinyint"},{"id":4,"vertexType":"COLUMN","vertexId":"default.alltypesorc.csmallint"},{"id":5,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cboolean1"},{"id":6,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cstring1"},{"id":7,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cint"},{"id":8,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cboolean2"},{"id":9,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cfloat"}]} 38 216 false 38 229 true PREHOOK: query: drop table if exists src_dp diff --git ql/src/test/results/clientpositive/llap/llap_acid.q.out ql/src/test/results/clientpositive/llap/llap_acid.q.out index 635f928cc4..d441ab03fc 100644 --- ql/src/test/results/clientpositive/llap/llap_acid.q.out +++ ql/src/test/results/clientpositive/llap/llap_acid.q.out @@ -115,10 +115,10 @@ STAGE PLANS: projectedOutputColumnNums: [0, 4, 1] Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4, 0] + keyColumns: 4:smallint, 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:bigint Execution mode: vectorized, llap LLAP IO: may be used (ACID table) Map Vectorization: @@ -142,7 +142,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: zz reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -259,11 +259,11 @@ STAGE PLANS: projectedOutputColumnNums: [5, 2, 3, 4] Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [5] + keyColumns: 5:struct native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [6] - valueColumnNums: [2, 3, 4] + partitionColumns: 6:int + valueColumns: 2:float, 3:double, 4:smallint Execution mode: vectorized, llap LLAP IO: may be used (ACID table) Map Vectorization: @@ -288,7 +288,7 @@ STAGE 
PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -369,10 +369,10 @@ STAGE PLANS: projectedOutputColumnNums: [0, 4, 1] Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4, 0] + keyColumns: 4:smallint, 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:bigint Execution mode: vectorized, llap LLAP IO: may be used (ACID table) Map Vectorization: @@ -396,7 +396,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: zz reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false diff --git ql/src/test/results/clientpositive/llap/llap_acid_fast.q.out ql/src/test/results/clientpositive/llap/llap_acid_fast.q.out index c4dc6f7601..a3e09d1598 100644 --- ql/src/test/results/clientpositive/llap/llap_acid_fast.q.out +++ ql/src/test/results/clientpositive/llap/llap_acid_fast.q.out @@ -109,10 +109,10 @@ STAGE PLANS: projectedOutputColumnNums: [0, 4, 1] Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4, 0] + keyColumns: 4:smallint, 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:bigint Execution mode: vectorized, llap LLAP IO: may be used (ACID table) Map Vectorization: @@ -136,7 +136,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: zz reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -253,11 +253,11 @@ STAGE PLANS: projectedOutputColumnNums: [5, 2, 3, 4] Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [5] + keyColumns: 5:struct native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [6] - valueColumnNums: [2, 3, 4] + partitionColumns: 6:int + valueColumns: 2:float, 3:double, 4:smallint Execution mode: vectorized, llap LLAP IO: may be used (ACID table) Map Vectorization: @@ -282,7 +282,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -363,10 +363,10 @@ STAGE PLANS: projectedOutputColumnNums: [0, 4, 1] Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator 
- keyColumnNums: [4, 0] + keyColumns: 4:smallint, 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:bigint Execution mode: vectorized, llap LLAP IO: may be used (ACID table) Map Vectorization: @@ -390,7 +390,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: zz reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false diff --git ql/src/test/results/clientpositive/llap/llap_partitioned.q.out ql/src/test/results/clientpositive/llap/llap_partitioned.q.out index e6fa1ac2fb..f078ecc2b5 100644 --- ql/src/test/results/clientpositive/llap/llap_partitioned.q.out +++ ql/src/test/results/clientpositive/llap/llap_partitioned.q.out @@ -1645,13 +1645,15 @@ STAGE PLANS: 0 ctinyint (type: tinyint) 1 ctinyint (type: tinyint) Map Join Vectorization: - bigTableKeyColumnNums: [10] - bigTableRetainedColumnNums: [1, 6, 7, 10] - bigTableValueColumnNums: [1, 6, 7, 10] + bigTableKeyColumns: 10:tinyint + bigTableRetainColumnNums: [1, 6, 7, 10] + bigTableValueColumns: 1:int, 6:char(255), 7:varchar(255), 10:tinyint className: VectorMapJoinInnerBigOnlyLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [1, 6, 7, 10] + nonOuterSmallTableKeyMapping: [] + projectedOutput: 1:int, 6:char(255), 7:varchar(255), 10:tinyint + hashTableImplementationType: OPTIMIZED outputColumnNames: _col1, _col6, _col7, _col10 input vertices: 1 Map 2 @@ -1706,10 +1708,9 @@ STAGE PLANS: Map-reduce partition columns: ctinyint (type: tinyint) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [10] + keyColumns: 10:tinyint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 10 Data size: 120 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: ctinyint (type: tinyint) @@ -2115,10 +2116,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 694 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/llap_smb_ptf.q.out ql/src/test/results/clientpositive/llap/llap_smb_ptf.q.out index 8ac21ff54c..3830bf6440 100644 --- ql/src/test/results/clientpositive/llap/llap_smb_ptf.q.out +++ 
ql/src/test/results/clientpositive/llap/llap_smb_ptf.q.out @@ -591,7 +591,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col4 ASC NULLS FIRST + order by: _col4 ASC NULLS LAST partition by: _col0, _col4 raw input shape: window functions: @@ -715,7 +715,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST partition by: _col3, _col2 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/llap/llap_vector_nohybridgrace.q.out ql/src/test/results/clientpositive/llap/llap_vector_nohybridgrace.q.out index b16b8d0d3b..1e2b330c2b 100644 --- ql/src/test/results/clientpositive/llap/llap_vector_nohybridgrace.q.out +++ ql/src/test/results/clientpositive/llap/llap_vector_nohybridgrace.q.out @@ -64,6 +64,7 @@ STAGE PLANS: className: VectorMapJoinInnerBigOnlyLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Fast Hash Table and No Hybrid Hash Join IS true + hashTableImplementationType: FAST input vertices: 1 Map 3 Statistics: Num rows: 24737 Data size: 197896 Basic stats: COMPLETE Column stats: COMPLETE @@ -245,6 +246,7 @@ STAGE PLANS: className: VectorMapJoinInnerBigOnlyLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Fast Hash Table and No Hybrid Hash Join IS true + hashTableImplementationType: FAST input vertices: 1 Map 3 Statistics: Num rows: 24737 Data size: 197896 Basic stats: COMPLETE Column stats: COMPLETE diff --git ql/src/test/results/clientpositive/llap/mapjoin2.q.out ql/src/test/results/clientpositive/llap/mapjoin2.q.out index 872f918efd..c0d20b4f08 100644 --- ql/src/test/results/clientpositive/llap/mapjoin2.q.out +++ ql/src/test/results/clientpositive/llap/mapjoin2.q.out @@ -26,6 +26,86 @@ POSTHOOK: Input: _dummy_database@_dummy_table POSTHOOK: Output: default@tbl_n1 POSTHOOK: Lineage: tbl_n1.n SCRIPT [] POSTHOOK: Lineage: tbl_n1.t SCRIPT [] +Warning: Map Join MAPJOIN[13][bigTable=?] 
in task 'Map 1' is a cross product +PREHOOK: query: explain +select a.n, a.t, isnull(b.n), isnull(b.t) from (select * from tbl_n1 where n = 1) a left outer join (select * from tbl_n1 where 1 = 2) b on a.n = b.n +PREHOOK: type: QUERY +POSTHOOK: query: explain +select a.n, a.t, isnull(b.n), isnull(b.t) from (select * from tbl_n1 where n = 1) a left outer join (select * from tbl_n1 where 1 = 2) b on a.n = b.n +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Map 1 <- Map 2 (BROADCAST_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: tbl_n1 + filterExpr: (n = 1L) (type: boolean) + Statistics: Num rows: 2 Data size: 190 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: (n = 1L) (type: boolean) + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: t (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 87 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Left Outer Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2 + input vertices: + 1 Map 2 + Statistics: Num rows: 1 Data size: 182 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 1L (type: bigint), _col0 (type: string), _col1 is null (type: boolean), _col2 is null (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 103 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 103 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: no inputs + Map 2 + Map Operator Tree: + TableScan + alias: tbl_n1 + Statistics: Num rows: 2 Data size: 190 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: false (type: boolean) + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: n (type: bigint), t (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: bigint), _col1 (type: string) + Execution mode: vectorized, llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + Warning: Map Join MAPJOIN[13][bigTable=?] in task 'Map 1' is a cross product PREHOOK: query: select a.n, a.t, isnull(b.n), isnull(b.t) from (select * from tbl_n1 where n = 1) a left outer join (select * from tbl_n1 where 1 = 2) b on a.n = b.n PREHOOK: type: QUERY @@ -36,6 +116,89 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@tbl_n1 #### A masked pattern was here #### 1 one true true +Warning: Map Join MAPJOIN[13][bigTable=?] 
in task 'Map 2' is a cross product +PREHOOK: query: explain +select isnull(a.n), isnull(a.t), b.n, b.t from (select * from tbl_n1 where 2 = 1) a right outer join (select * from tbl_n1 where n = 2) b on a.n = b.n +PREHOOK: type: QUERY +POSTHOOK: query: explain +select isnull(a.n), isnull(a.t), b.n, b.t from (select * from tbl_n1 where 2 = 1) a right outer join (select * from tbl_n1 where n = 2) b on a.n = b.n +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Map 2 <- Map 1 (BROADCAST_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: tbl_n1 + Statistics: Num rows: 2 Data size: 190 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: false (type: boolean) + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: n (type: bigint), t (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: bigint), _col1 (type: string) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map 2 + Map Operator Tree: + TableScan + alias: tbl_n1 + filterExpr: (n = 2L) (type: boolean) + Statistics: Num rows: 2 Data size: 190 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: (n = 2L) (type: boolean) + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: t (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 87 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Right Outer Join 0 to 1 + filter predicates: + 0 + 1 {true} + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2 + input vertices: + 0 Map 1 + Statistics: Num rows: 1 Data size: 182 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: _col0 is null (type: boolean), _col1 is null (type: boolean), 2L (type: bigint), _col2 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 103 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 103 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + Warning: Map Join MAPJOIN[13][bigTable=?] 
in task 'Map 2' is a cross product PREHOOK: query: select isnull(a.n), isnull(a.t), b.n, b.t from (select * from tbl_n1 where 2 = 1) a right outer join (select * from tbl_n1 where n = 2) b on a.n = b.n PREHOOK: type: QUERY @@ -46,6 +209,95 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@tbl_n1 #### A masked pattern was here #### true true 2 two +Warning: Shuffle Join MERGEJOIN[13][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product +PREHOOK: query: explain +select isnull(a.n), isnull(a.t), isnull(b.n), isnull(b.t) from (select * from tbl_n1 where n = 1) a full outer join (select * from tbl_n1 where n = 2) b on a.n = b.n +PREHOOK: type: QUERY +POSTHOOK: query: explain +select isnull(a.n), isnull(a.t), isnull(b.n), isnull(b.t) from (select * from tbl_n1 where n = 1) a full outer join (select * from tbl_n1 where n = 2) b on a.n = b.n +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 3 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: tbl_n1 + filterExpr: (n = 1L) (type: boolean) + Statistics: Num rows: 2 Data size: 190 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: (n = 1L) (type: boolean) + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 1L (type: bigint), t (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: bigint), _col1 (type: string) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map 3 + Map Operator Tree: + TableScan + alias: tbl_n1 + filterExpr: (n = 2L) (type: boolean) + Statistics: Num rows: 2 Data size: 190 Basic stats: COMPLETE Column stats: COMPLETE + Filter Operator + predicate: (n = 2L) (type: boolean) + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 2L (type: bigint), t (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: bigint), _col1 (type: string) + Execution mode: vectorized, llap + LLAP IO: no inputs + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + filter predicates: + 0 + 1 {false} + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 190 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: _col0 is null (type: boolean), _col1 is null (type: boolean), _col2 is null (type: boolean), _col3 is null (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + Warning: Shuffle Join MERGEJOIN[13][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product PREHOOK: query: select isnull(a.n), isnull(a.t), isnull(b.n), isnull(b.t) from (select * from tbl_n1 where n = 1) a full outer join (select * from tbl_n1 where n = 2) b on a.n = b.n PREHOOK: type: QUERY @@ -57,6 +309,75 @@ POSTHOOK: Input: default@tbl_n1 #### A masked pattern was here #### false false true true true true false false +Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product +PREHOOK: query: explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key +PREHOOK: type: QUERY +POSTHOOK: query: explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Map 1 <- Map 2 (BROADCAST_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 + 1 + input vertices: + 1 Map 2 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 11 (type: int), 1 (type: int), 1 (type: int), 0 (type: int), 0 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: llap + LLAP IO: no inputs + Map 2 + Map Operator Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: llap + LLAP IO: no inputs + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key PREHOOK: type: QUERY @@ -67,6 +388,79 @@ POSTHOOK: type: QUERY POSTHOOK: Input: _dummy_database@_dummy_table #### A masked pattern was here #### 11 1 1 0 0 +Warning: Map Join MAPJOIN[9][bigTable=?] 
in task 'Map 1' is a cross product
+PREHOOK: query: explain
+select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a left outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a left outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 1 <- Map 2 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: _dummy_table
+                  Row Limit Per Split: 1
+                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
+                    Map Join Operator
+                      condition map:
+                           Left Outer Join 0 to 1
+                      keys:
+                        0
+                        1
+                      outputColumnNames: _col1, _col2
+                      input vertices:
+                        1 Map 2
+                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                      Select Operator
+                        expressions: 11 (type: int), 1 (type: int), _col1 (type: int), 0 (type: int), _col2 (type: int)
+                        outputColumnNames: _col0, _col1, _col2, _col3, _col4
+                        Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE
+                          table:
+                              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2
+            Map Operator Tree:
+                TableScan
+                  alias: _dummy_table
+                  Row Limit Per Split: 1
+                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: 1 (type: int), 0 (type: int)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      sort order:
+                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: int), _col1 (type: int)
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
 Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
 PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a left outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
 PREHOOK: type: QUERY
@@ -77,6 +471,82 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
 11 1 1 0 0
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
+PREHOOK: query: explain
+select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 2 <- Map 1 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: _dummy_table
+                  Row Limit Per Split: 1
+                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: 11 (type: int), 1 (type: int), 0 (type: int)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      sort order:
+                      Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2
+            Map Operator Tree:
+                TableScan
+                  alias: _dummy_table
+                  Row Limit Per Split: 1
+                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
+                    Map Join Operator
+                      condition map:
+                           Right Outer Join 0 to 1
+                      filter predicates:
+                        0
+                        1 {true}
+                      keys:
+                        0
+                        1
+                      outputColumnNames: _col0, _col1, _col2
+                      input vertices:
+                        0 Map 1
+                      Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: COMPLETE
+                      Select Operator
+                        expressions: _col0 (type: int), _col1 (type: int), 1 (type: int), _col2 (type: int), 0 (type: int)
+                        outputColumnNames: _col0, _col1, _col2, _col3, _col4
+                        Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE
+                          table:
+                              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
 Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
 PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
 PREHOOK: type: QUERY
@@ -87,6 +557,82 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
 11 1 1 0 0
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
+PREHOOK: query: explain
+select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 2 <- Map 1 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: _dummy_table
+                  Row Limit Per Split: 1
+                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: 11 (type: int), 1 (type: int), 0 (type: int)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      sort order:
+                      Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 2
+            Map Operator Tree:
+                TableScan
+                  alias: _dummy_table
+                  Row Limit Per Split: 1
+                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
+                    Map Join Operator
+                      condition map:
+                           Right Outer Join 0 to 1
+                      filter predicates:
+                        0
+                        1 {true}
+                      keys:
+                        0
+                        1
+                      outputColumnNames: _col0, _col1, _col2
+                      input vertices:
+                        0 Map 1
+                      Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: COMPLETE
+                      Select Operator
+                        expressions: _col0 (type: int), _col1 (type: int), 1 (type: int), _col2 (type: int), 0 (type: int)
+                        outputColumnNames: _col0, _col1, _col2, _col3, _col4
+                        Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE
+                        File Output Operator
+                          compressed: false
+                          Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE
+                          table:
+                              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: llap
+            LLAP IO: no inputs
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
 Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
 PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a full outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
 PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/llap/mapjoin46.q.out ql/src/test/results/clientpositive/llap/mapjoin46.q.out
index 52eb609414..d0d9c87a49 100644
--- ql/src/test/results/clientpositive/llap/mapjoin46.q.out
+++ ql/src/test/results/clientpositive/llap/mapjoin46.q.out
@@ -128,14 +128,14 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n4
 POSTHOOK: Input: default@test2_n2
 #### A masked pattern was here ####
-NULL NULL None NULL NULL NULL
+100 1 Bob NULL NULL NULL
+101 2 Car 102 2 Del
+101 2 Car 103 2 Ema
 98 NULL None NULL NULL NULL
 99 0 Alice NULL NULL NULL
 99 2 Mat 102 2 Del
 99 2 Mat 103 2 Ema
-100 1 Bob NULL NULL NULL
-101 2 Car 102 2 Del
-101 2 Car 103 2 Ema
+NULL NULL None NULL NULL NULL
 PREHOOK: query: EXPLAIN
 SELECT *
 FROM test1_n4 LEFT OUTER JOIN test2_n2
@@ -239,12 +239,12 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n4
 POSTHOOK: Input: default@test2_n2
 #### A masked pattern was here ####
-NULL NULL None NULL NULL NULL
+100 1 Bob NULL NULL NULL
+101 2 Car 102 2 Del
 98 NULL None NULL NULL NULL
 99 0 Alice NULL NULL NULL
 99 2 Mat NULL NULL NULL
-100 1 Bob NULL NULL NULL
-101 2 Car 102 2 Del
+NULL NULL None NULL NULL NULL
 Warning: Map Join MAPJOIN[11][bigTable=?] in task 'Map 1' is a cross product
 PREHOOK: query: EXPLAIN
 SELECT *
@@ -344,12 +344,12 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n4
 POSTHOOK: Input: default@test2_n2
 #### A masked pattern was here ####
-NULL NULL None NULL NULL NULL
+100 1 Bob 102 2 Del
+101 2 Car 102 2 Del
 98 NULL None NULL NULL NULL
 99 0 Alice NULL NULL NULL
 99 2 Mat NULL NULL NULL
-100 1 Bob 102 2 Del
-101 2 Car 102 2 Del
+NULL NULL None NULL NULL NULL
 PREHOOK: query: EXPLAIN
 SELECT *
 FROM test1_n4 RIGHT OUTER JOIN test2_n2
@@ -438,10 +438,10 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n4
 POSTHOOK: Input: default@test2_n2
 #### A masked pattern was here ####
-99 2 Mat 102 2 Del
 101 2 Car 102 2 Del
-99 2 Mat 103 2 Ema
 101 2 Car 103 2 Ema
+99 2 Mat 102 2 Del
+99 2 Mat 103 2 Ema
 NULL NULL NULL 104 3 Fli
 NULL NULL NULL 105 NULL None
 Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
@@ -535,18 +535,18 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n4
 POSTHOOK: Input: default@test2_n2
 #### A masked pattern was here ####
-NULL NULL None NULL NULL NULL
-98 NULL None NULL NULL NULL
-99 0 Alice NULL NULL NULL
-99 2 Mat NULL NULL NULL
 100 1 Bob 102 2 Del
-100 1 Bob 105 NULL None
-100 1 Bob 104 3 Fli
 100 1 Bob 103 2 Ema
+100 1 Bob 104 3 Fli
+100 1 Bob 105 NULL None
 101 2 Car 102 2 Del
-101 2 Car 105 NULL None
-101 2 Car 104 3 Fli
 101 2 Car 103 2 Ema
+101 2 Car 104 3 Fli
+101 2 Car 105 NULL None
+98 NULL None NULL NULL NULL
+99 0 Alice NULL NULL NULL
+99 2 Mat NULL NULL NULL
+NULL NULL None NULL NULL NULL
 Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
 PREHOOK: query: EXPLAIN
 SELECT *
@@ -644,19 +644,19 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n4
 POSTHOOK: Input: default@test2_n2
 #### A masked pattern was here ####
-NULL NULL None 102 2 Del
-98 NULL None 102 2 Del
-99 0 Alice 102 2 Del
-99 2 Mat 102 2 Del
-99 2 Mat 103 2 Ema
 100 1 Bob 102 2 Del
-100 1 Bob 105 NULL None
-100 1 Bob 104 3 Fli
 100 1 Bob 103 2 Ema
+100 1 Bob 104 3 Fli
+100 1 Bob 105 NULL None
 101 2 Car 102 2 Del
-101 2 Car 105 NULL None
-101 2 Car 104 3 Fli
 101 2 Car 103 2 Ema
+101 2 Car 104 3 Fli
+101 2 Car 105 NULL None
+98 NULL None 102 2 Del
+99 0 Alice 102 2 Del
+99 2 Mat 102 2 Del
+99 2 Mat 103 2 Ema
+NULL NULL None 102 2 Del
 Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
 PREHOOK: query: EXPLAIN
 SELECT *
@@ -750,19 +750,19 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n4
 POSTHOOK: Input: default@test2_n2
 #### A masked pattern was here ####
-NULL NULL None NULL NULL NULL
-98 NULL None NULL NULL NULL
-99 0 Alice NULL NULL NULL
-99 2 Mat 102 2 Del
-99 2 Mat 103 2 Ema
 100 1 Bob 102 2 Del
-100 1 Bob 105 NULL None
-100 1 Bob 104 3 Fli
 100 1 Bob 103 2 Ema
+100 1 Bob 104 3 Fli
+100 1 Bob 105 NULL None
 101 2 Car 102 2 Del
-101 2 Car 105 NULL None
-101 2 Car 104 3 Fli
 101 2 Car 103 2 Ema
+101 2 Car 104 3 Fli
+101 2 Car 105 NULL None
+98 NULL None NULL NULL NULL
+99 0 Alice NULL NULL NULL
+99 2 Mat 102 2 Del
+99 2 Mat 103 2 Ema
+NULL NULL None NULL NULL NULL
 Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
 PREHOOK: query: EXPLAIN
 SELECT *
@@ -856,14 +856,14 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n4
 POSTHOOK: Input: default@test2_n2
 #### A masked pattern was here ####
-NULL NULL None 102 2 Del
+100 1 Bob 102 2 Del
+101 2 Car 102 2 Del
+101 2 Car 103 2 Ema
 98 NULL None 102 2 Del
 99 0 Alice 102 2 Del
 99 2 Mat 102 2 Del
 99 2 Mat 103 2 Ema
-100 1 Bob 102 2 Del
-101 2 Car 102 2 Del
-101 2 Car 103 2 Ema
+NULL NULL None 102 2 Del
 PREHOOK: query: EXPLAIN
 SELECT *
 FROM test1_n4 LEFT OUTER JOIN test2_n2
@@ -961,13 +961,13 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n4
 POSTHOOK: Input: default@test2_n2
 #### A masked pattern was here ####
-NULL NULL None NULL NULL NULL
-98 NULL None NULL NULL NULL
-99 0 Alice NULL NULL NULL
-99 2 Mat 102 2 Del
 100 1 Bob NULL NULL NULL
 101 2 Car 102 2 Del
 101 2 Car 103 2 Ema
+98 NULL None NULL NULL NULL
+99 0 Alice NULL NULL NULL
+99 2 Mat 102 2 Del
+NULL NULL None NULL NULL NULL
 Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
 PREHOOK: query: EXPLAIN
 SELECT *
@@ -1065,19 +1065,19 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n4
 POSTHOOK: Input: default@test2_n2
 #### A masked pattern was here ####
-NULL NULL None 102 2 Del
-101 2 Car 102 2 Del
 100 1 Bob 102 2 Del
-99 2 Mat 102 2 Del
-99 0 Alice 102 2 Del
-98 NULL None 102 2 Del
-101 2 Car 103 2 Ema
 100 1 Bob 103 2 Ema
-99 2 Mat 103 2 Ema
-101 2 Car 104 3 Fli
 100 1 Bob 104 3 Fli
-101 2 Car 105 NULL None
 100 1 Bob 105 NULL None
+101 2 Car 102 2 Del
+101 2 Car 103 2 Ema
+101 2 Car 104 3 Fli
+101 2 Car 105 NULL None
+98 NULL None 102 2 Del
+99 0 Alice 102 2 Del
+99 2 Mat 102 2 Del
+99 2 Mat 103 2 Ema
+NULL NULL None 102 2 Del
 Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
 PREHOOK: query: EXPLAIN
 SELECT *
@@ -1171,16 +1171,16 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n4
 POSTHOOK: Input: default@test2_n2
 #### A masked pattern was here ####
-101 2 Car 102 2 Del
 100 1 Bob 102 2 Del
-99 2 Mat 102 2 Del
-101 2 Car 103 2 Ema
 100 1 Bob 103 2 Ema
-99 2 Mat 103 2 Ema
-101 2 Car 104 3 Fli
 100 1 Bob 104 3 Fli
-101 2 Car 105 NULL None
 100 1 Bob 105 NULL None
+101 2 Car 102 2 Del
+101 2 Car 103 2 Ema
+101 2 Car 104 3 Fli
+101 2 Car 105 NULL None
+99 2 Mat 102 2 Del
+99 2 Mat 103 2 Ema
 Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
 PREHOOK: query: EXPLAIN
 SELECT *
@@ -1274,16 +1274,16 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n4
 POSTHOOK: Input: default@test2_n2
 #### A masked pattern was here ####
-NULL NULL None 102 2 Del
-101 2 Car 102 2 Del
 100 1 Bob 102 2 Del
-99 2 Mat 102 2 Del
-99 0 Alice 102 2 Del
-98 NULL None 102 2 Del
+101 2 Car 102 2 Del
 101 2 Car 103 2 Ema
+98 NULL None 102 2 Del
+99 0 Alice 102 2 Del
+99 2 Mat 102 2 Del
 99 2 Mat 103 2 Ema
 NULL NULL NULL 104 3 Fli
 NULL NULL NULL 105 NULL None
+NULL NULL None 102 2 Del
 PREHOOK: query: EXPLAIN
 SELECT *
 FROM test1_n4 RIGHT OUTER JOIN test2_n2
@@ -1381,9 +1381,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n4
 POSTHOOK: Input: default@test2_n2
 #### A masked pattern was here ####
-99 2 Mat 102 2 Del
 101 2 Car 102 2 Del
 101 2 Car 103 2 Ema
+99 2 Mat 102 2 Del
 NULL NULL NULL 104 3 Fli
 NULL NULL NULL 105 NULL None
 Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
@@ -1448,7 +1448,7 @@ STAGE PLANS:
             Reduce Operator Tree:
               Merge Join Operator
                 condition map:
-                     Outer Join 0 to 1
+                     Full Outer Join 0 to 1
                 keys:
                   0
                   1
@@ -1488,11 +1488,6 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n4
 POSTHOOK: Input: default@test2_n2
 #### A masked pattern was here ####
-NULL NULL None 102 2 Del
-98 NULL None 102 2 Del
-99 0 Alice 102 2 Del
-99 2 Mat 102 2 Del
-99 2 Mat 103 2 Ema
 100 1 Bob 102 2 Del
 100 1 Bob 103 2 Ema
 100 1 Bob 104 3 Fli
@@ -1501,18 +1496,25 @@ NULL NULL None 102 2 Del
 101 2 Car 103 2 Ema
 101 2 Car 104 3 Fli
 101 2 Car 105 NULL None
+98 NULL None 102 2 Del
+99 0 Alice 102 2 Del
+99 2 Mat 102 2 Del
+99 2 Mat 103 2 Ema
+NULL NULL None 102 2 Del
 Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
 PREHOOK: query: EXPLAIN
 SELECT *
 FROM test1_n4 FULL OUTER JOIN test2_n2
 ON (test1_n4.value=test2_n2.value
-  OR test1_n4.key between 100 and 102)
+  OR test1_n4.key between 100 and 102
+  OR test2_n2.key between 100 and 102)
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN
 SELECT *
 FROM test1_n4 FULL OUTER JOIN test2_n2
 ON (test1_n4.value=test2_n2.value
-  OR test1_n4.key between 100 and 102)
+  OR test1_n4.key between 100 and 102
+  OR test2_n2.key between 100 and 102)
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -1561,12 +1563,12 @@ STAGE PLANS:
             Reduce Operator Tree:
               Merge Join Operator
                 condition map:
-                     Outer Join 0 to 1
+                     Full Outer Join 0 to 1
                 keys:
                   0
                   1
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
+                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
                 Statistics: Num rows: 24 Data size: 4580 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
@@ -1586,7 +1588,8 @@ Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reduce
 PREHOOK: query: SELECT *
 FROM test1_n4 FULL OUTER JOIN test2_n2
 ON (test1_n4.value=test2_n2.value
-  OR test1_n4.key between 100 and 102)
+  OR test1_n4.key between 100 and 102
+  OR test2_n2.key between 100 and 102)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@test1_n4
 PREHOOK: Input: default@test2_n2
@@ -1594,16 +1597,12 @@ PREHOOK: Input: default@test2_n2
 POSTHOOK: query: SELECT *
 FROM test1_n4 FULL OUTER JOIN test2_n2
 ON (test1_n4.value=test2_n2.value
-  OR test1_n4.key between 100 and 102)
+  OR test1_n4.key between 100 and 102
+  OR test2_n2.key between 100 and 102)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n4
 POSTHOOK: Input: default@test2_n2
 #### A masked pattern was here ####
-NULL NULL None NULL NULL NULL
-98 NULL None NULL NULL NULL
-99 0 Alice NULL NULL NULL
-99 2 Mat 102 2 Del
-99 2 Mat 103 2 Ema
 100 1 Bob 102 2 Del
 100 1 Bob 103 2 Ema
 100 1 Bob 104 3 Fli
@@ -1612,18 +1611,23 @@ NULL NULL None NULL NULL NULL
 101 2 Car 103 2 Ema
 101 2 Car 104 3 Fli
 101 2 Car 105 NULL None
+98 NULL None 102 2 Del
+99 0 Alice 102 2 Del
+99 2 Mat 102 2 Del
+99 2 Mat 103 2 Ema
+NULL NULL None 102 2 Del
 Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
 PREHOOK: query: EXPLAIN
 SELECT *
 FROM test1_n4 FULL OUTER JOIN test2_n2
 ON (test1_n4.value=test2_n2.value
-  OR test2_n2.key between 100 and 102)
+  OR test1_n4.key between 100 and 102)
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN
 SELECT *
 FROM test1_n4 FULL OUTER JOIN test2_n2
 ON (test1_n4.value=test2_n2.value
-  OR test2_n2.key between 100 and 102)
+  OR test1_n4.key between 100 and 102)
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -1672,12 +1676,12 @@ STAGE PLANS:
             Reduce Operator Tree:
               Merge Join Operator
                 condition map:
-                     Outer Join 0 to 1
+                     Full Outer Join 0 to 1
                 keys:
                   0
                   1
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)}
+                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
                 Statistics: Num rows: 24 Data size: 4580 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
@@ -1697,7 +1701,7 @@ Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reduce
 PREHOOK: query: SELECT *
 FROM test1_n4 FULL OUTER JOIN test2_n2
 ON (test1_n4.value=test2_n2.value
-  OR test2_n2.key between 100 and 102)
+  OR test1_n4.key between 100 and 102)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@test1_n4
 PREHOOK: Input: default@test2_n2
@@ -1705,34 +1709,36 @@ PREHOOK: Input: default@test2_n2
 POSTHOOK: query: SELECT *
 FROM test1_n4 FULL OUTER JOIN test2_n2
 ON (test1_n4.value=test2_n2.value
-  OR test2_n2.key between 100 and 102)
+  OR test1_n4.key between 100 and 102)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n4
 POSTHOOK: Input: default@test2_n2
 #### A masked pattern was here ####
-NULL NULL None 102 2 Del
-98 NULL None 102 2 Del
-99 0 Alice 102 2 Del
-99 2 Mat 102 2 Del
-99 2 Mat 103 2 Ema
 100 1 Bob 102 2 Del
+100 1 Bob 103 2 Ema
+100 1 Bob 104 3 Fli
+100 1 Bob 105 NULL None
 101 2 Car 102 2 Del
 101 2 Car 103 2 Ema
-NULL NULL NULL 104 3 Fli
-NULL NULL NULL 105 NULL None
+101 2 Car 104 3 Fli
+101 2 Car 105 NULL None
+98 NULL None NULL NULL NULL
+99 0 Alice NULL NULL NULL
+99 2 Mat 102 2 Del
+99 2 Mat 103 2 Ema
+NULL NULL None NULL NULL NULL
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
 PREHOOK: query: EXPLAIN
 SELECT *
 FROM test1_n4 FULL OUTER JOIN test2_n2
 ON (test1_n4.value=test2_n2.value
-  AND (test1_n4.key between 100 and 102
-  OR test2_n2.key between 100 and 102))
+  OR test1_n4.key between 100 and 102)
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN
 SELECT *
 FROM test1_n4 FULL OUTER JOIN test2_n2
 ON (test1_n4.value=test2_n2.value
-  AND (test1_n4.key between 100 and 102
-  OR test2_n2.key between 100 and 102))
+  OR test1_n4.key between 100 and 102)
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -1743,7 +1749,7 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 3 (CUSTOM_SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
         Map 1
@@ -1756,11 +1762,9 @@ STAGE PLANS:
                     outputColumnNames: _col0, _col1, _col2
                     Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
                     Reduce Output Operator
-                      key expressions: _col1 (type: int)
-                      sort order: +
-                      Map-reduce partition columns: _col1 (type: int)
+                      sort order:
                       Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
-                      value expressions: _col0 (type: int), _col2 (type: string)
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
         Map 3
@@ -1773,11 +1777,9 @@ STAGE PLANS:
                     outputColumnNames: _col0, _col1, _col2
                     Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
                     Reduce Output Operator
-                      key expressions: _col1 (type: int)
-                      sort order: +
-                      Map-reduce partition columns: _col1 (type: int)
+                      sort order:
                       Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
-                      value expressions: _col0 (type: int), _col2 (type: string)
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
         Reducer 2
@@ -1785,16 +1787,16 @@ STAGE PLANS:
             Reduce Operator Tree:
               Merge Join Operator
                 condition map:
-                     Outer Join 0 to 1
+                     Full Outer Join 0 to 1
                 keys:
-                  0 _col1 (type: int)
-                  1 _col1 (type: int)
+                  0
+                  1
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
-                Statistics: Num rows: 10 Data size: 191 Basic stats: COMPLETE Column stats: COMPLETE
+                residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 4580 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 10 Data size: 191 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 24 Data size: 4580 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1806,11 +1808,11 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
 PREHOOK: query: SELECT *
 FROM test1_n4 FULL OUTER JOIN test2_n2
 ON (test1_n4.value=test2_n2.value
-  AND (test1_n4.key between 100 and 102
-  OR test2_n2.key between 100 and 102))
+  OR test1_n4.key between 100 and 102)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@test1_n4
 PREHOOK: Input: default@test2_n2
@@ -1818,29 +1820,474 @@ PREHOOK: Input: default@test2_n2
 POSTHOOK: query: SELECT *
 FROM test1_n4 FULL OUTER JOIN test2_n2
 ON (test1_n4.value=test2_n2.value
-  AND (test1_n4.key between 100 and 102
-  OR test2_n2.key between 100 and 102))
+  OR test1_n4.key between 100 and 102)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n4
 POSTHOOK: Input: default@test2_n2
 #### A masked pattern was here ####
-NULL NULL NULL 105 NULL None
-NULL NULL None NULL NULL NULL
+100 1 Bob 102 2 Del
+100 1 Bob 103 2 Ema
+100 1 Bob 104 3 Fli
+100 1 Bob 105 NULL None
+101 2 Car 102 2 Del
+101 2 Car 103 2 Ema
+101 2 Car 104 3 Fli
+101 2 Car 105 NULL None
 98 NULL None NULL NULL NULL
 99 0 Alice NULL NULL NULL
-100 1 Bob NULL NULL NULL
 99 2 Mat 102 2 Del
-101 2 Car 102 2 Del
-101 2 Car 103 2 Ema
-NULL NULL NULL 104 3 Fli
-Warning: Shuffle Join MERGEJOIN[36][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Reducer 2' is a cross product
+99 2 Mat 103 2 Ema
+NULL NULL None NULL NULL NULL
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
 PREHOOK: query: EXPLAIN
 SELECT *
-FROM (
-  SELECT test1_n4.key AS key1, test1_n4.value AS value1, test1_n4.col_1 AS col_1,
-         test2_n2.key AS key2, test2_n2.value AS value2, test2_n2.col_2 AS col_2
-  FROM test1_n4 RIGHT OUTER JOIN test2_n2
-  ON (test1_n4.value=test2_n2.value
+FROM test1_n4 FULL OUTER JOIN test2_n2
+ON (test1_n4.value=test2_n2.value
+  OR test2_n2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT *
+FROM test1_n4 FULL OUTER JOIN test2_n2
+ON (test1_n4.value=test2_n2.value
+  OR test2_n2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 3 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: test1_n4
+                  Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      sort order:
+                      Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3
+            Map Operator Tree:
+                TableScan
+                  alias: test2_n2
+                  Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      sort order:
+                      Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Full Outer Join 0 to 1
+                keys:
+                  0
+                  1
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 4580 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 4580 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1_n4 FULL OUTER JOIN test2_n2
+ON (test1_n4.value=test2_n2.value
+  OR test2_n2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1_n4
+PREHOOK: Input: default@test2_n2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1_n4 FULL OUTER JOIN test2_n2
+ON (test1_n4.value=test2_n2.value
+  OR test2_n2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1_n4
+POSTHOOK: Input: default@test2_n2
+#### A masked pattern was here ####
+100 1 Bob 102 2 Del
+101 2 Car 102 2 Del
+101 2 Car 103 2 Ema
+98 NULL None 102 2 Del
+99 0 Alice 102 2 Del
+99 2 Mat 102 2 Del
+99 2 Mat 103 2 Ema
+NULL NULL NULL 104 3 Fli
+NULL NULL NULL 105 NULL None
+NULL NULL None 102 2 Del
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: EXPLAIN
+SELECT *
+FROM test1_n4 FULL OUTER JOIN test2_n2
+ON (test1_n4.value=test2_n2.value
+  OR test2_n2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT *
+FROM test1_n4 FULL OUTER JOIN test2_n2
+ON (test1_n4.value=test2_n2.value
+  OR test2_n2.key between 100 and 102)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 3 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: test1_n4
+                  Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      sort order:
+                      Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3
+            Map Operator Tree:
+                TableScan
+                  alias: test2_n2
+                  Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      sort order:
+                      Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Full Outer Join 0 to 1
+                keys:
+                  0
+                  1
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 24 Data size: 4580 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 24 Data size: 4580 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT *
+FROM test1_n4 FULL OUTER JOIN test2_n2
+ON (test1_n4.value=test2_n2.value
+  OR test2_n2.key between 100 and 102)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1_n4
+PREHOOK: Input: default@test2_n2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1_n4 FULL OUTER JOIN test2_n2
+ON (test1_n4.value=test2_n2.value
+  OR test2_n2.key between 100 and 102)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1_n4
+POSTHOOK: Input: default@test2_n2
+#### A masked pattern was here ####
+100 1 Bob 102 2 Del
+101 2 Car 102 2 Del
+101 2 Car 103 2 Ema
+98 NULL None 102 2 Del
+99 0 Alice 102 2 Del
+99 2 Mat 102 2 Del
+99 2 Mat 103 2 Ema
+NULL NULL NULL 104 3 Fli
+NULL NULL NULL 105 NULL None
+NULL NULL None 102 2 Del
+PREHOOK: query: EXPLAIN
+SELECT *
+FROM test1_n4 FULL OUTER JOIN test2_n2
+ON (test1_n4.value=test2_n2.value
+  AND (test1_n4.key between 100 and 102
+  OR test2_n2.key between 100 and 102))
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT *
+FROM test1_n4 FULL OUTER JOIN test2_n2
+ON (test1_n4.value=test2_n2.value
+  AND (test1_n4.key between 100 and 102
+  OR test2_n2.key between 100 and 102))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: test1_n4
+                  Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3
+            Map Operator Tree:
+                TableScan
+                  alias: test2_n2
+                  Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Full Outer Join 0 to 1
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 10 Data size: 191 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 10 Data size: 191 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1_n4 FULL OUTER JOIN test2_n2
+ON (test1_n4.value=test2_n2.value
+  AND (test1_n4.key between 100 and 102
+  OR test2_n2.key between 100 and 102))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1_n4
+PREHOOK: Input: default@test2_n2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1_n4 FULL OUTER JOIN test2_n2
+ON (test1_n4.value=test2_n2.value
+  AND (test1_n4.key between 100 and 102
+  OR test2_n2.key between 100 and 102))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1_n4
+POSTHOOK: Input: default@test2_n2
+#### A masked pattern was here ####
+100 1 Bob NULL NULL NULL
+101 2 Car 102 2 Del
+101 2 Car 103 2 Ema
+98 NULL None NULL NULL NULL
+99 0 Alice NULL NULL NULL
+99 2 Mat 102 2 Del
+NULL NULL NULL 104 3 Fli
+NULL NULL NULL 105 NULL None
+NULL NULL None NULL NULL NULL
+PREHOOK: query: EXPLAIN
+SELECT *
+FROM test1_n4 FULL OUTER JOIN test2_n2
+ON (test1_n4.value=test2_n2.value
+  AND (test1_n4.key between 100 and 102
+  OR test2_n2.key between 100 and 102))
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT *
+FROM test1_n4 FULL OUTER JOIN test2_n2
+ON (test1_n4.value=test2_n2.value
+  AND (test1_n4.key between 100 and 102
+  OR test2_n2.key between 100 and 102))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: test1_n4
+                  Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3
+            Map Operator Tree:
+                TableScan
+                  alias: test2_n2
+                  Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Full Outer Join 0 to 1
+                keys:
+                  0 _col1 (type: int)
+                  1 _col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                Statistics: Num rows: 10 Data size: 191 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 10 Data size: 191 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT *
+FROM test1_n4 FULL OUTER JOIN test2_n2
+ON (test1_n4.value=test2_n2.value
+  AND (test1_n4.key between 100 and 102
+  OR test2_n2.key between 100 and 102))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1_n4
+PREHOOK: Input: default@test2_n2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1_n4 FULL OUTER JOIN test2_n2
+ON (test1_n4.value=test2_n2.value
+  AND (test1_n4.key between 100 and 102
+  OR test2_n2.key between 100 and 102))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1_n4
+POSTHOOK: Input: default@test2_n2
+#### A masked pattern was here ####
+100 1 Bob NULL NULL NULL
+101 2 Car 102 2 Del
+101 2 Car 103 2 Ema
+98 NULL None NULL NULL NULL
+99 0 Alice NULL NULL NULL
+99 2 Mat 102 2 Del
+NULL NULL NULL 104 3 Fli
+NULL NULL NULL 105 NULL None
+NULL NULL None NULL NULL NULL
+Warning: Shuffle Join MERGEJOIN[36][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: EXPLAIN
+SELECT *
+FROM (
+  SELECT test1_n4.key AS key1, test1_n4.value AS value1, test1_n4.col_1 AS col_1,
+         test2_n2.key AS key2, test2_n2.value AS value2, test2_n2.col_2 AS col_2
+  FROM test1_n4 RIGHT OUTER JOIN test2_n2
+  ON (test1_n4.value=test2_n2.value
   AND (test1_n4.key between 100 and 102
   OR test2_n2.key between 100 and 102))
   ) sq1
@@ -1972,7 +2419,7 @@ STAGE PLANS:
             Reduce Operator Tree:
               Merge Join Operator
                 condition map:
-                     Outer Join 0 to 1
+                     Full Outer Join 0 to 1
                 keys:
                   0
                   1
@@ -2038,23 +2485,248 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n4
 POSTHOOK: Input: default@test2_n2
 #### A masked pattern was here ####
-99 2 Mat 102 2 Del 99 0 Alice NULL NULL NULL
-99 2 Mat 102 2 Del 100 1 Bob NULL NULL NULL
-101 2 Car 102 2 Del 99 0 Alice NULL NULL NULL
 101 2 Car 102 2 Del 100 1 Bob NULL NULL NULL
-101 2 Car 103 2 Ema 99 0 Alice NULL NULL NULL
+101 2 Car 102 2 Del 99 0 Alice NULL NULL NULL
 101 2 Car 103 2 Ema 100 1 Bob NULL NULL NULL
-NULL NULL NULL 104 3 Fli NULL NULL None NULL NULL NULL
-NULL NULL NULL 104 3 Fli 98 NULL None NULL NULL NULL
-NULL NULL NULL 104 3 Fli 99 0 Alice NULL NULL NULL
-NULL NULL NULL 104 3 Fli 99 2 Mat 102 2 Del
+101 2 Car 103 2 Ema 99 0 Alice NULL NULL NULL
+99 2 Mat 102 2 Del 100 1 Bob NULL NULL NULL
+99 2 Mat 102 2 Del 99 0 Alice NULL NULL NULL
 NULL NULL NULL 104 3 Fli 100 1 Bob NULL NULL NULL
 NULL NULL NULL 104 3 Fli 101 2 Car 102 2 Del
 NULL NULL NULL 104 3 Fli 101 2 Car 103 2 Ema
-NULL NULL NULL 105 NULL None NULL NULL None NULL NULL NULL
+NULL NULL NULL 104 3 Fli 98 NULL None NULL NULL NULL
+NULL NULL NULL 104 3 Fli 99 0 Alice NULL NULL NULL
+NULL NULL NULL 104 3 Fli 99 2 Mat 102 2 Del
+NULL NULL NULL 104 3 Fli NULL NULL None NULL NULL NULL
+NULL NULL NULL 105 NULL None 100 1 Bob NULL NULL NULL
+NULL NULL NULL 105 NULL None 101 2 Car 102 2 Del
+NULL NULL NULL 105 NULL None 101 2 Car 103 2 Ema
 NULL NULL NULL 105 NULL None 98 NULL None NULL NULL NULL
 NULL NULL NULL 105 NULL None 99 0 Alice NULL NULL NULL
 NULL NULL NULL 105 NULL None 99 2 Mat 102 2 Del
+NULL NULL NULL 105 NULL None NULL NULL None NULL NULL NULL
+Warning: Shuffle Join MERGEJOIN[36][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: EXPLAIN
+SELECT *
+FROM (
+  SELECT test1_n4.key AS key1, test1_n4.value AS value1, test1_n4.col_1 AS col_1,
+         test2_n2.key AS key2, test2_n2.value AS value2, test2_n2.col_2 AS col_2
+  FROM test1_n4 RIGHT OUTER JOIN test2_n2
+  ON (test1_n4.value=test2_n2.value
+  AND (test1_n4.key between 100 and 102
+  OR test2_n2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1_n4.key AS key3, test1_n4.value AS value3, test1_n4.col_1 AS col_3,
+         test2_n2.key AS key4, test2_n2.value AS value4, test2_n2.col_2 AS col_4
+  FROM test1_n4 LEFT OUTER JOIN test2_n2
+  ON (test1_n4.value=test2_n2.value
+  AND (test1_n4.key between 100 and 102
+  OR test2_n2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT *
+FROM (
+  SELECT test1_n4.key AS key1, test1_n4.value AS value1, test1_n4.col_1 AS col_1,
+         test2_n2.key AS key2, test2_n2.value AS value2, test2_n2.col_2 AS col_2
+  FROM test1_n4 RIGHT OUTER JOIN test2_n2
+  ON (test1_n4.value=test2_n2.value
+  AND (test1_n4.key between 100 and 102
+  OR test2_n2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1_n4.key AS key3, test1_n4.value AS value3, test1_n4.col_1 AS col_3,
+         test2_n2.key AS key4, test2_n2.value AS value4, test2_n2.col_2 AS col_4
+  FROM test1_n4 LEFT OUTER JOIN test2_n2
+  ON (test1_n4.value=test2_n2.value
+  AND (test1_n4.key between 100 and 102
+  OR test2_n2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 1 <- Map 4 (BROADCAST_EDGE)
+        Map 3 <- Map 1 (BROADCAST_EDGE)
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 3 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: test1_n4
+                  Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_1 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                    Map Join Operator
+                      condition map:
+                           Left Outer Join 0 to 1
+                      keys:
+                        0 _col1 (type: int)
+                        1 _col1 (type: int)
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        1 Map 4
+                      residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                      Statistics: Num rows: 8 Data size: 859 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        sort order:
+                        Statistics: Num rows: 8 Data size: 859 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string), _col3 (type: int), _col4 (type: int), _col5 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 3
+            Map Operator Tree:
+                TableScan
+                  alias: test2_n2
+                  Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                    Map Join Operator
+                      condition map:
+                           Right Outer Join 0 to 1
+                      keys:
+                        0 _col1 (type: int)
+                        1 _col1 (type: int)
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                      input vertices:
+                        0 Map 1
+                      residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)}
+                      Statistics: Num rows: 8 Data size: 856 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        sort order:
+                        Statistics: Num rows: 8 Data size: 856 Basic stats: COMPLETE Column stats: COMPLETE
+                        value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string), _col3 (type: int), _col4 (type: int), _col5 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 4
+            Map Operator Tree:
+                TableScan
+                  alias: test2_n2
+                  Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                  Select Operator
+                    expressions: key (type: int), value (type: int), col_2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                    Reduce Output Operator
+                      key expressions: _col1 (type: int)
+                      sort order: +
+                      Map-reduce partition columns: _col1 (type: int)
+                      Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                      value expressions: _col0 (type: int), _col2 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Full Outer Join 0 to 1
+                keys:
+                  0
+                  1
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
+                residual filter predicates: {(_col1 is null or (_col10 is null and (_col7 <> _col4)))}
+                Statistics: Num rows: 64 Data size: 23107 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 64 Data size: 23107 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[36][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT *
+FROM (
+  SELECT test1_n4.key AS key1, test1_n4.value AS value1, test1_n4.col_1 AS col_1,
+         test2_n2.key AS key2, test2_n2.value AS value2, test2_n2.col_2 AS col_2
+  FROM test1_n4 RIGHT OUTER JOIN test2_n2
+  ON (test1_n4.value=test2_n2.value
+  AND (test1_n4.key between 100 and 102
+  OR test2_n2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1_n4.key AS key3, test1_n4.value AS value3, test1_n4.col_1 AS col_3,
+         test2_n2.key AS key4, test2_n2.value AS value4, test2_n2.col_2 AS col_4
+  FROM test1_n4 LEFT OUTER JOIN test2_n2
+  ON (test1_n4.value=test2_n2.value
+  AND (test1_n4.key between 100 and 102
+  OR test2_n2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1_n4
+PREHOOK: Input: default@test2_n2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM (
+  SELECT test1_n4.key AS key1, test1_n4.value AS value1, test1_n4.col_1 AS col_1,
+         test2_n2.key AS key2, test2_n2.value AS value2, test2_n2.col_2 AS col_2
+  FROM test1_n4 RIGHT OUTER JOIN test2_n2
+  ON (test1_n4.value=test2_n2.value
+  AND (test1_n4.key between 100 and 102
+  OR test2_n2.key between 100 and 102))
+  ) sq1
+FULL OUTER JOIN (
+  SELECT test1_n4.key AS key3, test1_n4.value AS value3, test1_n4.col_1 AS col_3,
+         test2_n2.key AS key4, test2_n2.value AS value4, test2_n2.col_2 AS col_4
+  FROM test1_n4 LEFT OUTER JOIN test2_n2
+  ON (test1_n4.value=test2_n2.value
+  AND (test1_n4.key between 100 and 102
+  OR test2_n2.key between 100 and 102))
+  ) sq2
+ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1_n4
+POSTHOOK: Input: default@test2_n2
+#### A masked pattern was here ####
+101 2 Car 102 2 Del 100 1 Bob NULL NULL NULL
+101 2 Car 102 2 Del 99 0 Alice NULL NULL NULL
+101 2 Car 103 2 Ema 100 1 Bob NULL NULL NULL
+101 2 Car 103 2 Ema 99 0 Alice NULL NULL NULL
+99 2 Mat 102 2 Del 100 1 Bob NULL NULL NULL
+99 2 Mat 102 2 Del 99 0 Alice NULL NULL NULL
+NULL NULL NULL 104 3 Fli 100 1 Bob NULL NULL NULL
+NULL NULL NULL 104 3 Fli 101 2 Car 102 2 Del
+NULL NULL NULL 104 3 Fli 101 2 Car 103 2 Ema
+NULL NULL NULL 104 3 Fli 98 NULL None NULL NULL NULL
+NULL NULL NULL 104 3 Fli 99 0 Alice NULL NULL NULL
+NULL NULL NULL 104 3 Fli 99 2 Mat 102 2 Del
+NULL NULL NULL 104 3 Fli NULL NULL None NULL NULL NULL
 NULL NULL NULL 105 NULL None 100 1 Bob NULL NULL NULL
 NULL NULL NULL 105 NULL None 101 2 Car 102 2 Del
 NULL NULL NULL 105 NULL None 101 2 Car 103 2 Ema
+NULL NULL NULL 105 NULL None 98 NULL None NULL NULL NULL
+NULL NULL NULL 105 NULL None 99 0 Alice NULL NULL NULL
+NULL NULL NULL 105 NULL None 99 2 Mat 102 2 Del
+NULL NULL NULL 105 NULL None NULL NULL None NULL NULL NULL
diff --git ql/src/test/results/clientpositive/llap/mergejoin.q.out ql/src/test/results/clientpositive/llap/mergejoin.q.out
index 1e4f6325e1..b86d822796 100644
--- ql/src/test/results/clientpositive/llap/mergejoin.q.out
+++ ql/src/test/results/clientpositive/llap/mergejoin.q.out
@@ -52,10 +52,10 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
-                          keyColumnNums: [0]
+                          keyColumns: 0:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: [1]
+                          valueColumns: 1:string
                       Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: string)
             Execution mode: vectorized, llap
@@ -105,10 +105,10 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
-                          keyColumnNums: [0]
+                          keyColumns: 0:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: [1]
+                          valueColumns: 1:string
                       Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col1 (type: string)
                   Select Operator
@@ -135,10 +135,9 @@ STAGE PLANS:
                       sort order:
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkEmptyKeyOperator
-                          keyColumnNums: []
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: [0, 1, 2]
+                          valueColumns: 0:string, 1:string, 2:binary
                       Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: binary)
             Execution mode: vectorized, llap
@@ -176,6 +175,9 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            MergeJoin Vectorization:
+                enabled: false
+                enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 4
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -208,10 +210,9 @@ STAGE PLANS:
                   sort order:
                   Reduce Sink Vectorization:
                       className: VectorReduceSinkEmptyKeyOperator
-                      keyColumnNums: []
                       native: true
                       nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                      valueColumnNums: [0, 1, 2]
+                      valueColumns: 0:string, 1:string, 2:binary
                   Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: binary)
@@ -444,6 +445,9 @@ STAGE PLANS:
                   Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint)
             Execution mode: llap
+            MergeJoin Vectorization:
+                enabled: false
+                enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -1535,6 +1539,9 @@ STAGE PLANS:
                   Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint)
             Execution mode: llap
+            MergeJoin Vectorization:
+                enabled: false
+                enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 2
             Execution mode: vectorized, llap
            Reduce Vectorization:
@@ -1655,6 +1662,9 @@ STAGE PLANS:
                   Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint)
             Execution mode: llap
+            MergeJoin Vectorization:
+                enabled: false
+                enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
            Execution mode: vectorized, llap
            Reduce Vectorization:
@@ -1764,10 +1774,9 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: int)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkLongOperator
-                          keyColumnNums: [0]
+                          keyColumns: 0:int
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: []
                       Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: vectorized, llap
             LLAP IO: all inputs
@@ -1809,10 +1818,9 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: int)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkLongOperator
-                          keyColumnNums: [0]
+                          keyColumns: 0:int
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: []
                       Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: vectorized, llap
             LLAP IO: all inputs
@@ -1837,7 +1845,7 @@ STAGE PLANS:
             Reduce Operator Tree:
               Merge Join Operator
                 condition map:
-                     Outer Join 0 to 1
+                     Full Outer Join 0 to 1
                 keys:
                   0 _col0 (type: int)
                   1 _col0 (type: int)
@@ -1851,6 +1859,9 @@ STAGE PLANS:
                   sort order:
                   Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint)
+            MergeJoin Vectorization:
+                enabled: false
+                enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -1971,10 +1982,10 @@ STAGE PLANS:
                       Map-reduce partition columns: _col1 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
-                          keyColumnNums: [1]
+                          keyColumns: 1:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: [0]
+                          valueColumns: 0:int
                       Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: int)
             Execution mode: vectorized, llap
@@ -2025,10 +2036,9 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
-                          keyColumnNums: [1]
+                          keyColumns: 1:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: []
                       Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: _col0 (type: string)
@@ -2054,10 +2064,9 @@ STAGE PLANS:
                       sort order:
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkEmptyKeyOperator
-                          keyColumnNums: []
                          native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: [0, 1, 2]
+                          valueColumns: 0:string, 1:string, 2:binary
                       Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: binary)
             Execution mode: vectorized, llap
@@ -2107,10 +2116,9 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: int)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkLongOperator
-                          keyColumnNums: [0]
+                          keyColumns: 0:int
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: []
                       Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: vectorized, llap
             LLAP IO: all inputs
@@ -2159,6 +2167,9 @@ STAGE PLANS:
                   sort order:
                   Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: binary)
+            MergeJoin Vectorization:
+                enabled: false
+                enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: llap
             Reduce Operator Tree:
@@ -2178,6 +2189,9 @@ STAGE PLANS:
                   sort order:
                   Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint)
+            MergeJoin Vectorization:
+                enabled: false
+                enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 4
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -2248,10 +2262,9 @@ STAGE PLANS:
                   sort order:
                   Reduce Sink Vectorization:
                       className: VectorReduceSinkEmptyKeyOperator
-                      keyColumnNums: []
                      native: true
                       nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                      valueColumnNums: [0, 1, 2]
+                      valueColumns: 0:int, 1:int, 2:binary
                   Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: binary)
         Reducer 7
@@ -2286,10 +2299,9 @@ STAGE PLANS:
                   sort order:
                   Reduce Sink Vectorization:
                       className: VectorReduceSinkEmptyKeyOperator
-                      keyColumnNums: []
                      native: true
                       nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                      valueColumnNums: [0, 1, 2]
+                      valueColumns: 0:string, 1:string, 2:binary
                   Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: binary)
@@ -2369,10 +2381,9 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
-                          keyColumnNums: [1]
+                          keyColumns: 1:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: []
                       Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: vectorized, llap
             LLAP IO: all inputs
@@ -2422,10 +2433,9 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
-                          keyColumnNums: [1]
+                          keyColumns: 1:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: []
                       Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: vectorized, llap
             LLAP IO: all inputs
@@ -2464,6 +2474,9 @@ STAGE PLANS:
                   sort order:
                   Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint)
+            MergeJoin Vectorization:
+                enabled: false
+                enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -2594,6 +2607,9 @@ STAGE PLANS:
                   Map-reduce partition columns: _col0 (type: int)
                   Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: llap
+            MergeJoin Vectorization:
+                enabled: false
+                enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Map 6
             Map Operator Tree:
                 TableScan
@@ -2624,10 +2640,9 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: int)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkLongOperator
-                          keyColumnNums: [0]
+                          keyColumns: 0:int
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: []
                       Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: vectorized, llap
             LLAP IO: all inputs
@@ -2677,10 +2692,9 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: int)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkLongOperator
-                          keyColumnNums: [0]
+                          keyColumns: 0:int
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: []
                       Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: vectorized, llap
             LLAP IO: all inputs
@@ -2719,6 +2733,9 @@ STAGE PLANS:
                   sort order:
                   Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint)
+            MergeJoin Vectorization:
+                enabled: false
+                enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 4
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -2819,10 +2836,9 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
-                          keyColumnNums: [1]
+                          keyColumns: 1:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: []
                       Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: vectorized, llap
             LLAP IO: all inputs
@@ -2872,10 +2888,9 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
-                          keyColumnNums: [1]
+                          keyColumns: 1:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: []
                       Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: vectorized, llap
             LLAP IO: all inputs
@@ -2914,6 +2929,9 @@ STAGE PLANS:
                   sort order:
                   Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint)
+            MergeJoin Vectorization:
+                enabled: false
+                enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -3032,10 +3050,10 @@ STAGE PLANS:
                       Map-reduce partition columns: _col1 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
-                          keyColumnNums: [1]
+                          keyColumns: 1:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: [0]
+                          valueColumns: 0:int
                       Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: int)
             Execution mode: vectorized, llap
@@ -3086,10 +3104,9 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
-                          keyColumnNums: [1]
+                          keyColumns: 1:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: []
                       Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: _col0 (type: string)
@@ -3115,10 +3132,9 @@ STAGE PLANS:
                       sort order:
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkEmptyKeyOperator
-                          keyColumnNums: []
                          native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: [0, 1, 2]
+                          valueColumns: 0:string, 1:string, 2:binary
                       Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: binary)
             Execution mode: vectorized, llap
@@ -3168,10 +3184,9 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: int)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkLongOperator
-                          keyColumnNums: [0]
+                          keyColumns: 0:int
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: []
                       Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: vectorized, llap
             LLAP IO: all inputs
@@ -3220,6 +3235,9 @@ STAGE PLANS:
                   sort order:
                   Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: binary)
+            MergeJoin Vectorization:
+                enabled: false
+                enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: llap
             Reduce Operator Tree:
@@ -3239,6 +3257,9 @@ STAGE PLANS:
                   sort order:
                   Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint)
+            MergeJoin Vectorization:
+                enabled: false
+
enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 4 Execution mode: vectorized, llap Reduce Vectorization: @@ -3309,10 +3330,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2] + valueColumns: 0:int, 1:int, 2:binary Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: binary) Reducer 7 @@ -3347,10 +3367,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2] + valueColumns: 0:string, 1:string, 2:binary Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: binary) @@ -3447,6 +3466,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE Execution mode: llap + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Map 6 Map Operator Tree: TableScan @@ -3477,10 +3499,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -3530,10 +3551,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -3572,6 +3592,9 @@ STAGE PLANS: sort order: Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 4 Execution mode: vectorized, llap Reduce Vectorization: @@ -3683,10 +3706,9 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: 
VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:int, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -3735,10 +3757,9 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:int, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -3786,6 +3807,9 @@ STAGE PLANS: sort order: Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 5 Execution mode: vectorized, llap Reduce Vectorization: diff --git ql/src/test/results/clientpositive/llap/offset_limit_ppd_optimizer.q.out ql/src/test/results/clientpositive/llap/offset_limit_ppd_optimizer.q.out index d95025c5c1..7d432f1842 100644 --- ql/src/test/results/clientpositive/llap/offset_limit_ppd_optimizer.q.out +++ ql/src/test/results/clientpositive/llap/offset_limit_ppd_optimizer.q.out @@ -461,7 +461,6 @@ POSTHOOK: query: select distinct(cdouble) as dis from alltypesorc order by dis l POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### --16309.0 -16307.0 -16306.0 -16305.0 @@ -481,6 +480,7 @@ POSTHOOK: Input: default@alltypesorc -16211.0 -16208.0 -16207.0 +-16201.0 PREHOOK: query: explain select ctinyint, count(distinct(cdouble)) from alltypesorc group by ctinyint order by ctinyint limit 10,20 PREHOOK: type: QUERY @@ -560,7 +560,6 @@ POSTHOOK: query: select ctinyint, count(distinct(cdouble)) from alltypesorc grou POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### --55 29 -54 26 -53 22 -52 33 @@ -580,6 +579,7 @@ POSTHOOK: Input: default@alltypesorc -38 31 -37 20 -36 26 +-35 26 PREHOOK: query: explain select ctinyint, count(cdouble) from (select ctinyint, cdouble from alltypesorc group by ctinyint, cdouble) t1 group by ctinyint order by ctinyint limit 10,20 PREHOOK: type: QUERY @@ -659,7 +659,6 @@ POSTHOOK: query: select ctinyint, count(cdouble) from (select ctinyint, cdouble POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### --55 29 -54 26 -53 22 -52 33 @@ -679,6 +678,7 @@ POSTHOOK: Input: default@alltypesorc -38 31 -37 20 -36 26 +-35 26 PREHOOK: query: explain select ctinyint, count(distinct(cstring1)), count(distinct(cstring2)) from alltypesorc group by ctinyint order by ctinyint limit 10,20 PREHOOK: type: QUERY @@ -755,7 +755,6 @@ POSTHOOK: query: select ctinyint, count(distinct(cstring1)), count(distinct(cstr POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### 
--55 3 21 -54 3 21 -53 3 17 -52 3 21 @@ -775,6 +774,7 @@ POSTHOOK: Input: default@alltypesorc -38 3 19 -37 3 27 -36 3 18 +-35 3 21 PREHOOK: query: explain select key,value from src order by key limit 0,0 PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/llap/orc_llap_counters.q.out ql/src/test/results/clientpositive/llap/orc_llap_counters.q.out index 54ccf58442..0a8a8a8522 100644 --- ql/src/test/results/clientpositive/llap/orc_llap_counters.q.out +++ ql/src/test/results/clientpositive/llap/orc_llap_counters.q.out @@ -237,7 +237,7 @@ Table Parameters: orc.bloom.filter.columns * orc.row.index.stride 1000 rawDataSize 1139514 - totalSize 55665 + totalSize 55686 #### A masked pattern was here #### # Storage Information @@ -255,7 +255,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n1 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 16675 + HDFS_BYTES_READ: 16681 HDFS_BYTES_WRITTEN: 104 HDFS_READ_OPS: 7 HDFS_LARGE_READ_OPS: 0 @@ -271,8 +271,8 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_GBY_8: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_7: 2100 - RECORDS_OUT_OPERATOR_SEL_6: 2100 - RECORDS_OUT_OPERATOR_TS_0: 2100 + RECORDS_OUT_OPERATOR_SEL_6: 3 + RECORDS_OUT_OPERATOR_TS_0: 3 Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 262144 ALLOCATED_USED_BYTES: 26 @@ -315,7 +315,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n1 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 1055 + HDFS_BYTES_READ: 1047 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -327,18 +327,18 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 8 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 8 + RECORDS_OUT_OPERATOR_FIL_8: 1 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 8 - RECORDS_OUT_OPERATOR_SEL_9: 8 - RECORDS_OUT_OPERATOR_TS_0: 1000 + RECORDS_OUT_OPERATOR_SEL_9: 1 + RECORDS_OUT_OPERATOR_TS_0: 1 Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 1048576 - ALLOCATED_USED_BYTES: 2732 + ALLOCATED_USED_BYTES: 2731 CACHE_HIT_BYTES: 24 - CACHE_MISS_BYTES: 1055 + CACHE_MISS_BYTES: 1047 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 NUM_VECTOR_BATCHES: 1 @@ -367,15 +367,15 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 22 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 22 + RECORDS_OUT_OPERATOR_FIL_8: 1 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 22 - RECORDS_OUT_OPERATOR_SEL_9: 22 - RECORDS_OUT_OPERATOR_TS_0: 1000 + RECORDS_OUT_OPERATOR_SEL_9: 1 + RECORDS_OUT_OPERATOR_TS_0: 1 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -405,15 +405,15 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 16 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 16 + RECORDS_OUT_OPERATOR_FIL_8: 1 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 16 - RECORDS_OUT_OPERATOR_SEL_9: 16 - RECORDS_OUT_OPERATOR_TS_0: 1000 + RECORDS_OUT_OPERATOR_SEL_9: 1 + RECORDS_OUT_OPERATOR_TS_0: 1 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -443,15 +443,15 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 
RECORDS_OUT_INTERMEDIATE_Map_1: 18 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 18 + RECORDS_OUT_OPERATOR_FIL_8: 2 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 18 - RECORDS_OUT_OPERATOR_SEL_9: 18 - RECORDS_OUT_OPERATOR_TS_0: 2000 + RECORDS_OUT_OPERATOR_SEL_9: 2 + RECORDS_OUT_OPERATOR_TS_0: 2 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 2 @@ -487,9 +487,9 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 1 RECORDS_OUT_OPERATOR_SEL_9: 1 - RECORDS_OUT_OPERATOR_TS_0: 1000 + RECORDS_OUT_OPERATOR_TS_0: 1 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -519,15 +519,15 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 32 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 32 + RECORDS_OUT_OPERATOR_FIL_8: 1 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 32 - RECORDS_OUT_OPERATOR_SEL_9: 32 - RECORDS_OUT_OPERATOR_TS_0: 1000 + RECORDS_OUT_OPERATOR_SEL_9: 1 + RECORDS_OUT_OPERATOR_TS_0: 1 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -557,15 +557,15 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 32 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 32 + RECORDS_OUT_OPERATOR_FIL_8: 1 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 32 - RECORDS_OUT_OPERATOR_SEL_9: 32 - RECORDS_OUT_OPERATOR_TS_0: 1000 + RECORDS_OUT_OPERATOR_SEL_9: 1 + RECORDS_OUT_OPERATOR_TS_0: 1 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -595,15 +595,15 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 1697 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 1697 + RECORDS_OUT_OPERATOR_FIL_8: 2 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 1697 - RECORDS_OUT_OPERATOR_SEL_9: 1697 - RECORDS_OUT_OPERATOR_TS_0: 2000 + RECORDS_OUT_OPERATOR_SEL_9: 2 + RECORDS_OUT_OPERATOR_TS_0: 2 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 2 @@ -633,15 +633,15 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 12 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 12 + RECORDS_OUT_OPERATOR_FIL_8: 1 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 12 - RECORDS_OUT_OPERATOR_SEL_9: 12 - RECORDS_OUT_OPERATOR_TS_0: 1000 + RECORDS_OUT_OPERATOR_SEL_9: 1 + RECORDS_OUT_OPERATOR_TS_0: 1 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -671,15 +671,15 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 1713 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 1713 + RECORDS_OUT_OPERATOR_FIL_8: 2 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 
RECORDS_OUT_OPERATOR_RS_10: 1713 - RECORDS_OUT_OPERATOR_SEL_9: 1713 - RECORDS_OUT_OPERATOR_TS_0: 2000 + RECORDS_OUT_OPERATOR_SEL_9: 2 + RECORDS_OUT_OPERATOR_TS_0: 2 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 2 @@ -705,24 +705,24 @@ Stage-1 FILE SYSTEM COUNTERS: Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 - RECORDS_IN_Map_1: 1000 + RECORDS_IN_Map_1: 100 RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 6 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 6 + RECORDS_OUT_OPERATOR_FIL_8: 1 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 6 - RECORDS_OUT_OPERATOR_SEL_9: 6 - RECORDS_OUT_OPERATOR_TS_0: 1000 + RECORDS_OUT_OPERATOR_SEL_9: 1 + RECORDS_OUT_OPERATOR_TS_0: 1 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 NUM_VECTOR_BATCHES: 1 - ROWS_EMITTED: 1000 + ROWS_EMITTED: 100 SELECTED_ROWGROUPS: 1 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 @@ -747,15 +747,15 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 50 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 50 + RECORDS_OUT_OPERATOR_FIL_8: 2 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 50 - RECORDS_OUT_OPERATOR_SEL_9: 50 - RECORDS_OUT_OPERATOR_TS_0: 1100 + RECORDS_OUT_OPERATOR_SEL_9: 2 + RECORDS_OUT_OPERATOR_TS_0: 2 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 2 @@ -785,15 +785,15 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 318 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 318 + RECORDS_OUT_OPERATOR_FIL_8: 1 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 318 - RECORDS_OUT_OPERATOR_SEL_9: 318 - RECORDS_OUT_OPERATOR_TS_0: 1000 + RECORDS_OUT_OPERATOR_SEL_9: 1 + RECORDS_OUT_OPERATOR_TS_0: 1 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -875,7 +875,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_9: 0 RECORDS_OUT_OPERATOR_TS_0: 0 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 830 + CACHE_HIT_BYTES: 823 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 SELECTED_ROWGROUPS: 0 @@ -910,7 +910,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 0 RECORDS_OUT_OPERATOR_TS_0: 0 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 830 + CACHE_HIT_BYTES: 823 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 SELECTED_ROWGROUPS: 0 @@ -959,16 +959,16 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 2 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 2 + RECORDS_OUT_OPERATOR_FIL_8: 1 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 2 - RECORDS_OUT_OPERATOR_SEL_9: 2 - RECORDS_OUT_OPERATOR_TS_0: 1000 + RECORDS_OUT_OPERATOR_SEL_9: 1 + RECORDS_OUT_OPERATOR_TS_0: 1 Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 1310720 - ALLOCATED_USED_BYTES: 13810 + ALLOCATED_USED_BYTES: 13812 CACHE_HIT_BYTES: 24 CACHE_MISS_BYTES: 5911 METADATA_CACHE_HIT: 2 @@ -999,13 +999,13 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 6 
RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 6 + RECORDS_OUT_OPERATOR_FIL_8: 1 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 6 - RECORDS_OUT_OPERATOR_SEL_9: 6 - RECORDS_OUT_OPERATOR_TS_0: 1000 + RECORDS_OUT_OPERATOR_SEL_9: 1 + RECORDS_OUT_OPERATOR_TS_0: 1 Stage-1 LLAP IO COUNTERS: CACHE_HIT_BYTES: 5935 CACHE_MISS_BYTES: 0 @@ -1037,13 +1037,13 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 6 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 6 + RECORDS_OUT_OPERATOR_FIL_8: 1 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 6 - RECORDS_OUT_OPERATOR_SEL_9: 6 - RECORDS_OUT_OPERATOR_TS_0: 1000 + RECORDS_OUT_OPERATOR_SEL_9: 1 + RECORDS_OUT_OPERATOR_TS_0: 1 Stage-1 LLAP IO COUNTERS: CACHE_HIT_BYTES: 5935 CACHE_MISS_BYTES: 0 @@ -1097,13 +1097,13 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 2100 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 2100 + RECORDS_OUT_OPERATOR_FIL_8: 3 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 2100 - RECORDS_OUT_OPERATOR_SEL_9: 2100 - RECORDS_OUT_OPERATOR_TS_0: 2100 + RECORDS_OUT_OPERATOR_SEL_9: 3 + RECORDS_OUT_OPERATOR_TS_0: 3 Stage-1 LLAP IO COUNTERS: CACHE_HIT_BYTES: 5935 CACHE_MISS_BYTES: 0 @@ -1143,7 +1143,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_9: 0 RECORDS_OUT_OPERATOR_TS_0: 0 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1726 + CACHE_HIT_BYTES: 1728 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 SELECTED_ROWGROUPS: 0 @@ -1170,13 +1170,13 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 2 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 2 + RECORDS_OUT_OPERATOR_FIL_8: 1 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 2 - RECORDS_OUT_OPERATOR_SEL_9: 2 - RECORDS_OUT_OPERATOR_TS_0: 1000 + RECORDS_OUT_OPERATOR_SEL_9: 1 + RECORDS_OUT_OPERATOR_TS_0: 1 Stage-1 LLAP IO COUNTERS: CACHE_HIT_BYTES: 5935 CACHE_MISS_BYTES: 0 @@ -1208,13 +1208,13 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 2 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 2 + RECORDS_OUT_OPERATOR_FIL_8: 1 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 2 - RECORDS_OUT_OPERATOR_SEL_9: 2 - RECORDS_OUT_OPERATOR_TS_0: 1000 + RECORDS_OUT_OPERATOR_SEL_9: 1 + RECORDS_OUT_OPERATOR_TS_0: 1 Stage-1 LLAP IO COUNTERS: CACHE_HIT_BYTES: 5935 CACHE_MISS_BYTES: 0 @@ -1246,13 +1246,13 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 2 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 2 + RECORDS_OUT_OPERATOR_FIL_8: 1 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 2 - RECORDS_OUT_OPERATOR_SEL_9: 2 - RECORDS_OUT_OPERATOR_TS_0: 1000 + RECORDS_OUT_OPERATOR_SEL_9: 1 + RECORDS_OUT_OPERATOR_TS_0: 1 Stage-1 LLAP IO COUNTERS: CACHE_HIT_BYTES: 5935 CACHE_MISS_BYTES: 0 @@ -1280,25 +1280,25 @@ Stage-1 FILE SYSTEM COUNTERS: Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 - RECORDS_IN_Map_1: 2100 + RECORDS_IN_Map_1: 2000 RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 81 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 81 + 
RECORDS_OUT_OPERATOR_FIL_8: 2 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 81 - RECORDS_OUT_OPERATOR_SEL_9: 81 - RECORDS_OUT_OPERATOR_TS_0: 2100 + RECORDS_OUT_OPERATOR_SEL_9: 2 + RECORDS_OUT_OPERATOR_TS_0: 2 Stage-1 LLAP IO COUNTERS: CACHE_HIT_BYTES: 5935 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 3 - NUM_VECTOR_BATCHES: 3 - ROWS_EMITTED: 2100 - SELECTED_ROWGROUPS: 3 + NUM_DECODED_BATCHES: 2 + NUM_VECTOR_BATCHES: 2 + ROWS_EMITTED: 2000 + SELECTED_ROWGROUPS: 2 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -1318,25 +1318,25 @@ Stage-1 FILE SYSTEM COUNTERS: Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 - RECORDS_IN_Map_1: 2100 + RECORDS_IN_Map_1: 2000 RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 74 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 74 + RECORDS_OUT_OPERATOR_FIL_8: 2 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 74 - RECORDS_OUT_OPERATOR_SEL_9: 74 - RECORDS_OUT_OPERATOR_TS_0: 2100 + RECORDS_OUT_OPERATOR_SEL_9: 2 + RECORDS_OUT_OPERATOR_TS_0: 2 Stage-1 LLAP IO COUNTERS: CACHE_HIT_BYTES: 5935 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 3 - NUM_VECTOR_BATCHES: 3 - ROWS_EMITTED: 2100 - SELECTED_ROWGROUPS: 3 + NUM_DECODED_BATCHES: 2 + NUM_VECTOR_BATCHES: 2 + ROWS_EMITTED: 2000 + SELECTED_ROWGROUPS: 2 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -1360,13 +1360,13 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 12 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 12 + RECORDS_OUT_OPERATOR_FIL_8: 2 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 12 - RECORDS_OUT_OPERATOR_SEL_9: 12 - RECORDS_OUT_OPERATOR_TS_0: 2000 + RECORDS_OUT_OPERATOR_SEL_9: 2 + RECORDS_OUT_OPERATOR_TS_0: 2 Stage-1 LLAP IO COUNTERS: CACHE_HIT_BYTES: 5935 CACHE_MISS_BYTES: 0 @@ -1398,13 +1398,13 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 13 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 13 + RECORDS_OUT_OPERATOR_FIL_8: 2 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 13 - RECORDS_OUT_OPERATOR_SEL_9: 13 - RECORDS_OUT_OPERATOR_TS_0: 2000 + RECORDS_OUT_OPERATOR_SEL_9: 2 + RECORDS_OUT_OPERATOR_TS_0: 2 Stage-1 LLAP IO COUNTERS: CACHE_HIT_BYTES: 5935 CACHE_MISS_BYTES: 0 @@ -1442,7 +1442,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 1 RECORDS_OUT_OPERATOR_SEL_9: 1 - RECORDS_OUT_OPERATOR_TS_0: 100 + RECORDS_OUT_OPERATOR_TS_0: 1 Stage-1 LLAP IO COUNTERS: CACHE_HIT_BYTES: 5935 CACHE_MISS_BYTES: 0 @@ -1474,13 +1474,13 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 7 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 7 + RECORDS_OUT_OPERATOR_FIL_8: 2 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 7 - RECORDS_OUT_OPERATOR_SEL_9: 7 - RECORDS_OUT_OPERATOR_TS_0: 1100 + RECORDS_OUT_OPERATOR_SEL_9: 2 + RECORDS_OUT_OPERATOR_TS_0: 2 Stage-1 LLAP IO COUNTERS: CACHE_HIT_BYTES: 5935 CACHE_MISS_BYTES: 0 @@ -1520,7 +1520,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_9: 0 RECORDS_OUT_OPERATOR_TS_0: 0 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 
1726 + CACHE_HIT_BYTES: 1728 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 SELECTED_ROWGROUPS: 0 @@ -1555,7 +1555,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_9: 0 RECORDS_OUT_OPERATOR_TS_0: 0 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1726 + CACHE_HIT_BYTES: 1728 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 SELECTED_ROWGROUPS: 0 @@ -1590,7 +1590,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_9: 0 RECORDS_OUT_OPERATOR_TS_0: 0 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1726 + CACHE_HIT_BYTES: 1728 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 SELECTED_ROWGROUPS: 0 @@ -1617,13 +1617,13 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 2 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 2 + RECORDS_OUT_OPERATOR_FIL_8: 1 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 2 - RECORDS_OUT_OPERATOR_SEL_9: 2 - RECORDS_OUT_OPERATOR_TS_0: 100 + RECORDS_OUT_OPERATOR_SEL_9: 1 + RECORDS_OUT_OPERATOR_TS_0: 1 Stage-1 LLAP IO COUNTERS: CACHE_HIT_BYTES: 5935 CACHE_MISS_BYTES: 0 @@ -1655,13 +1655,13 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 6 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 6 + RECORDS_OUT_OPERATOR_FIL_8: 2 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 6 - RECORDS_OUT_OPERATOR_SEL_9: 6 - RECORDS_OUT_OPERATOR_TS_0: 1100 + RECORDS_OUT_OPERATOR_SEL_9: 2 + RECORDS_OUT_OPERATOR_TS_0: 2 Stage-1 LLAP IO COUNTERS: CACHE_HIT_BYTES: 5935 CACHE_MISS_BYTES: 0 @@ -1693,15 +1693,15 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 2 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 2 + RECORDS_OUT_OPERATOR_FIL_8: 1 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 2 - RECORDS_OUT_OPERATOR_SEL_9: 2 - RECORDS_OUT_OPERATOR_TS_0: 1000 + RECORDS_OUT_OPERATOR_SEL_9: 1 + RECORDS_OUT_OPERATOR_TS_0: 1 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 6990 + CACHE_HIT_BYTES: 6982 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -1731,15 +1731,15 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 2 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 2 + RECORDS_OUT_OPERATOR_FIL_8: 1 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 2 - RECORDS_OUT_OPERATOR_SEL_9: 2 - RECORDS_OUT_OPERATOR_TS_0: 100 + RECORDS_OUT_OPERATOR_SEL_9: 1 + RECORDS_OUT_OPERATOR_TS_0: 1 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 6990 + CACHE_HIT_BYTES: 6982 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 diff --git ql/src/test/results/clientpositive/llap/orc_llap_counters1.q.out ql/src/test/results/clientpositive/llap/orc_llap_counters1.q.out index 93e2667f28..4d8400e36c 100644 --- ql/src/test/results/clientpositive/llap/orc_llap_counters1.q.out +++ ql/src/test/results/clientpositive/llap/orc_llap_counters1.q.out @@ -237,7 +237,7 @@ Table Parameters: orc.bloom.filter.columns * orc.row.index.stride 1000 rawDataSize 1139514 - totalSize 55665 + totalSize 55686 #### A masked pattern was here #### # Storage Information @@ -255,7 +255,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 17730 + HDFS_BYTES_READ: 17728 HDFS_BYTES_WRITTEN: 104 HDFS_READ_OPS: 8 
HDFS_LARGE_READ_OPS: 0 @@ -267,18 +267,18 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 2094 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 2094 + RECORDS_OUT_OPERATOR_FIL_8: 3 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 2094 - RECORDS_OUT_OPERATOR_SEL_9: 2094 - RECORDS_OUT_OPERATOR_TS_0: 2100 + RECORDS_OUT_OPERATOR_SEL_9: 3 + RECORDS_OUT_OPERATOR_TS_0: 3 Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 1310720 - ALLOCATED_USED_BYTES: 2758 + ALLOCATED_USED_BYTES: 2757 CACHE_HIT_BYTES: 0 - CACHE_MISS_BYTES: 1079 + CACHE_MISS_BYTES: 1071 METADATA_CACHE_MISS: 2 NUM_DECODED_BATCHES: 3 NUM_VECTOR_BATCHES: 3 @@ -307,15 +307,15 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 2094 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 - RECORDS_OUT_OPERATOR_FIL_8: 2094 + RECORDS_OUT_OPERATOR_FIL_8: 3 RECORDS_OUT_OPERATOR_FS_12: 1 RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 2094 - RECORDS_OUT_OPERATOR_SEL_9: 2094 - RECORDS_OUT_OPERATOR_TS_0: 2100 + RECORDS_OUT_OPERATOR_SEL_9: 3 + RECORDS_OUT_OPERATOR_TS_0: 3 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 diff --git ql/src/test/results/clientpositive/llap/orc_ppd_basic.q.out ql/src/test/results/clientpositive/llap/orc_ppd_basic.q.out index b8ea5cfc28..42c2f5be8e 100644 --- ql/src/test/results/clientpositive/llap/orc_ppd_basic.q.out +++ ql/src/test/results/clientpositive/llap/orc_ppd_basic.q.out @@ -207,7 +207,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n2 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 16675 + HDFS_BYTES_READ: 16681 HDFS_BYTES_WRITTEN: 104 HDFS_READ_OPS: 7 HDFS_LARGE_READ_OPS: 0 @@ -267,7 +267,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n2 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 1055 + HDFS_BYTES_READ: 1047 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -288,9 +288,9 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_TS_0: 1000 Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 1048576 - ALLOCATED_USED_BYTES: 2732 + ALLOCATED_USED_BYTES: 2731 CACHE_HIT_BYTES: 24 - CACHE_MISS_BYTES: 1055 + CACHE_MISS_BYTES: 1047 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 NUM_VECTOR_BATCHES: 1 @@ -327,7 +327,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 22 RECORDS_OUT_OPERATOR_TS_0: 1000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -365,7 +365,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 16 RECORDS_OUT_OPERATOR_TS_0: 1000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -403,7 +403,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 18 RECORDS_OUT_OPERATOR_TS_0: 2000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 2 @@ -441,7 +441,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 1 RECORDS_OUT_OPERATOR_TS_0: 1000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -479,7 +479,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 32 RECORDS_OUT_OPERATOR_TS_0: 1000 
Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -517,7 +517,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 32 RECORDS_OUT_OPERATOR_TS_0: 1000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -555,7 +555,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 1697 RECORDS_OUT_OPERATOR_TS_0: 2000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 2 @@ -593,7 +593,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 12 RECORDS_OUT_OPERATOR_TS_0: 1000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -631,7 +631,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 1713 RECORDS_OUT_OPERATOR_TS_0: 2000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 2 @@ -657,7 +657,7 @@ Stage-1 FILE SYSTEM COUNTERS: Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 - RECORDS_IN_Map_1: 1000 + RECORDS_IN_Map_1: 100 RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 6 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 @@ -667,14 +667,14 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_3: 6 RECORDS_OUT_OPERATOR_SEL_2: 6 - RECORDS_OUT_OPERATOR_TS_0: 1000 + RECORDS_OUT_OPERATOR_TS_0: 100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 NUM_VECTOR_BATCHES: 1 - ROWS_EMITTED: 1000 + ROWS_EMITTED: 100 SELECTED_ROWGROUPS: 1 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 @@ -707,7 +707,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 50 RECORDS_OUT_OPERATOR_TS_0: 1100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 2 @@ -745,7 +745,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 318 RECORDS_OUT_OPERATOR_TS_0: 1000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -827,7 +827,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 0 RECORDS_OUT_OPERATOR_TS_0: 0 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 830 + CACHE_HIT_BYTES: 823 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 SELECTED_ROWGROUPS: 0 @@ -862,7 +862,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 0 RECORDS_OUT_OPERATOR_TS_0: 0 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 830 + CACHE_HIT_BYTES: 823 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 SELECTED_ROWGROUPS: 0 @@ -920,7 +920,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_TS_0: 1000 Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 1310720 - ALLOCATED_USED_BYTES: 13810 + ALLOCATED_USED_BYTES: 13812 CACHE_HIT_BYTES: 24 CACHE_MISS_BYTES: 5911 METADATA_CACHE_HIT: 2 @@ -1095,7 +1095,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 0 RECORDS_OUT_OPERATOR_TS_0: 0 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1726 + CACHE_HIT_BYTES: 1728 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 SELECTED_ROWGROUPS: 0 @@ -1232,7 +1232,7 @@ Stage-1 FILE SYSTEM COUNTERS: Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 - RECORDS_IN_Map_1: 2100 + RECORDS_IN_Map_1: 2000 RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 81 
RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 @@ -1242,15 +1242,15 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_3: 81 RECORDS_OUT_OPERATOR_SEL_2: 81 - RECORDS_OUT_OPERATOR_TS_0: 2100 + RECORDS_OUT_OPERATOR_TS_0: 2000 Stage-1 LLAP IO COUNTERS: CACHE_HIT_BYTES: 5935 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 3 - NUM_VECTOR_BATCHES: 3 - ROWS_EMITTED: 2100 - SELECTED_ROWGROUPS: 3 + NUM_DECODED_BATCHES: 2 + NUM_VECTOR_BATCHES: 2 + ROWS_EMITTED: 2000 + SELECTED_ROWGROUPS: 2 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -1270,7 +1270,7 @@ Stage-1 FILE SYSTEM COUNTERS: Stage-1 HIVE COUNTERS: CREATED_FILES: 1 DESERIALIZE_ERRORS: 0 - RECORDS_IN_Map_1: 2100 + RECORDS_IN_Map_1: 2000 RECORDS_OUT_0: 1 RECORDS_OUT_INTERMEDIATE_Map_1: 74 RECORDS_OUT_INTERMEDIATE_Reducer_2: 0 @@ -1280,15 +1280,15 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_3: 74 RECORDS_OUT_OPERATOR_SEL_2: 74 - RECORDS_OUT_OPERATOR_TS_0: 2100 + RECORDS_OUT_OPERATOR_TS_0: 2000 Stage-1 LLAP IO COUNTERS: CACHE_HIT_BYTES: 5935 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 - NUM_DECODED_BATCHES: 3 - NUM_VECTOR_BATCHES: 3 - ROWS_EMITTED: 2100 - SELECTED_ROWGROUPS: 3 + NUM_DECODED_BATCHES: 2 + NUM_VECTOR_BATCHES: 2 + ROWS_EMITTED: 2000 + SELECTED_ROWGROUPS: 2 Stage-1 INPUT COUNTERS: GROUPED_INPUT_SPLITS_Map_1: 1 INPUT_DIRECTORIES_Map_1: 1 @@ -1472,7 +1472,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 0 RECORDS_OUT_OPERATOR_TS_0: 0 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1726 + CACHE_HIT_BYTES: 1728 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 SELECTED_ROWGROUPS: 0 @@ -1507,7 +1507,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 0 RECORDS_OUT_OPERATOR_TS_0: 0 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1726 + CACHE_HIT_BYTES: 1728 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 SELECTED_ROWGROUPS: 0 @@ -1542,7 +1542,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 0 RECORDS_OUT_OPERATOR_TS_0: 0 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1726 + CACHE_HIT_BYTES: 1728 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 SELECTED_ROWGROUPS: 0 @@ -1653,7 +1653,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 2 RECORDS_OUT_OPERATOR_TS_0: 1000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 6990 + CACHE_HIT_BYTES: 6982 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -1691,7 +1691,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 2 RECORDS_OUT_OPERATOR_TS_0: 100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 6990 + CACHE_HIT_BYTES: 6982 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -1709,7 +1709,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n2 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 4896 + HDFS_BYTES_READ: 4892 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 4 HDFS_LARGE_READ_OPS: 0 @@ -1732,7 +1732,7 @@ Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 524288 ALLOCATED_USED_BYTES: 8527 CACHE_HIT_BYTES: 24 - CACHE_MISS_BYTES: 4896 + CACHE_MISS_BYTES: 4892 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 NUM_VECTOR_BATCHES: 3 @@ -1749,7 +1749,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n2 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 1750 + HDFS_BYTES_READ: 1759 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 4 HDFS_LARGE_READ_OPS: 0 @@ -1771,8 +1771,8 @@ Stage-1 HIVE COUNTERS: Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 262144 ALLOCATED_USED_BYTES: 2376 - CACHE_HIT_BYTES: 4920 - 
CACHE_MISS_BYTES: 1750 + CACHE_HIT_BYTES: 4916 + CACHE_MISS_BYTES: 1759 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 NUM_VECTOR_BATCHES: 1 @@ -1809,7 +1809,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 2 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 4920 + CACHE_HIT_BYTES: 4916 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 @@ -1847,7 +1847,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 2 RECORDS_OUT_OPERATOR_TS_0: 100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 6670 + CACHE_HIT_BYTES: 6675 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -1866,7 +1866,7 @@ PREHOOK: Input: default@orc_ppd_staging_n1 PREHOOK: Output: database:default PREHOOK: Output: default@orc_ppd_1 Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 10569 + HDFS_BYTES_READ: 10583 HDFS_BYTES_WRITTEN: 1467 HDFS_READ_OPS: 6 HDFS_LARGE_READ_OPS: 0 @@ -1886,8 +1886,8 @@ Stage-1 HIVE COUNTERS: Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 2359296 ALLOCATED_USED_BYTES: 44198 - CACHE_HIT_BYTES: 30613 - CACHE_MISS_BYTES: 10569 + CACHE_HIT_BYTES: 30620 + CACHE_MISS_BYTES: 10583 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 NUM_VECTOR_BATCHES: 1 diff --git ql/src/test/results/clientpositive/llap/orc_ppd_schema_evol_3a.q.out ql/src/test/results/clientpositive/llap/orc_ppd_schema_evol_3a.q.out index e197126630..8a6a004896 100644 --- ql/src/test/results/clientpositive/llap/orc_ppd_schema_evol_3a.q.out +++ ql/src/test/results/clientpositive/llap/orc_ppd_schema_evol_3a.q.out @@ -207,7 +207,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 17010 + HDFS_BYTES_READ: 17011 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 7 HDFS_LARGE_READ_OPS: 0 @@ -228,9 +228,9 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 1048576 - ALLOCATED_USED_BYTES: 382 + ALLOCATED_USED_BYTES: 381 CACHE_HIT_BYTES: 0 - CACHE_MISS_BYTES: 359 + CACHE_MISS_BYTES: 354 METADATA_CACHE_MISS: 2 NUM_DECODED_BATCHES: 3 NUM_VECTOR_BATCHES: 3 @@ -289,7 +289,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 8 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 359 + CACHE_HIT_BYTES: 354 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 @@ -307,7 +307,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 720 + HDFS_BYTES_READ: 717 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 4 HDFS_LARGE_READ_OPS: 0 @@ -329,8 +329,8 @@ Stage-1 HIVE COUNTERS: Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 262144 ALLOCATED_USED_BYTES: 2376 - CACHE_HIT_BYTES: 359 - CACHE_MISS_BYTES: 720 + CACHE_HIT_BYTES: 354 + CACHE_MISS_BYTES: 717 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 NUM_VECTOR_BATCHES: 1 @@ -367,7 +367,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 18 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 359 + CACHE_HIT_BYTES: 354 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 @@ -405,7 +405,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 18 RECORDS_OUT_OPERATOR_TS_0: 2000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 2 @@ -447,7 +447,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 0 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 359 + CACHE_HIT_BYTES: 
354 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 @@ -507,7 +507,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 8 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 359 + CACHE_HIT_BYTES: 354 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 @@ -545,7 +545,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 8 RECORDS_OUT_OPERATOR_TS_0: 1000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -583,7 +583,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 18 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 359 + CACHE_HIT_BYTES: 354 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 @@ -621,7 +621,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 18 RECORDS_OUT_OPERATOR_TS_0: 2000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 2 @@ -663,7 +663,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 0 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 359 + CACHE_HIT_BYTES: 354 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 @@ -723,7 +723,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 8 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 359 + CACHE_HIT_BYTES: 354 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 @@ -761,7 +761,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 8 RECORDS_OUT_OPERATOR_TS_0: 1000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -799,7 +799,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 18 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 359 + CACHE_HIT_BYTES: 354 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 @@ -837,7 +837,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 18 RECORDS_OUT_OPERATOR_TS_0: 2000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 2 @@ -879,7 +879,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 0 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 359 + CACHE_HIT_BYTES: 354 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 @@ -939,7 +939,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 8 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 359 + CACHE_HIT_BYTES: 354 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 @@ -977,7 +977,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 8 RECORDS_OUT_OPERATOR_TS_0: 1000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 @@ -1015,7 +1015,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 18 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 359 + CACHE_HIT_BYTES: 354 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 @@ -1053,7 +1053,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 18 RECORDS_OUT_OPERATOR_TS_0: 2000 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 1079 + CACHE_HIT_BYTES: 1071 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 2 @@ -1075,7 +1075,7 @@ PREHOOK: type: QUERY PREHOOK: Input: 
default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 16900 + HDFS_BYTES_READ: 16905 HDFS_BYTES_WRITTEN: 104 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1107,7 +1107,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 17730 + HDFS_BYTES_READ: 17728 HDFS_BYTES_WRITTEN: 104 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1139,7 +1139,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 16900 + HDFS_BYTES_READ: 16905 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1171,7 +1171,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 17730 + HDFS_BYTES_READ: 17728 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1203,7 +1203,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 16900 + HDFS_BYTES_READ: 16905 HDFS_BYTES_WRITTEN: 102 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1235,7 +1235,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 17730 + HDFS_BYTES_READ: 17728 HDFS_BYTES_WRITTEN: 102 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1267,7 +1267,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 4896 + HDFS_BYTES_READ: 4892 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 4 HDFS_LARGE_READ_OPS: 0 @@ -1290,7 +1290,7 @@ Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 524288 ALLOCATED_USED_BYTES: 8527 CACHE_HIT_BYTES: 24 - CACHE_MISS_BYTES: 4896 + CACHE_MISS_BYTES: 4892 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 NUM_VECTOR_BATCHES: 3 @@ -1307,7 +1307,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 1750 + HDFS_BYTES_READ: 1759 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 4 HDFS_LARGE_READ_OPS: 0 @@ -1329,8 +1329,8 @@ Stage-1 HIVE COUNTERS: Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 262144 ALLOCATED_USED_BYTES: 2376 - CACHE_HIT_BYTES: 4920 - CACHE_MISS_BYTES: 1750 + CACHE_HIT_BYTES: 4916 + CACHE_MISS_BYTES: 1759 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 1 NUM_VECTOR_BATCHES: 1 @@ -1351,7 +1351,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 21443 + HDFS_BYTES_READ: 21446 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1383,7 +1383,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 23321 + HDFS_BYTES_READ: 23332 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1419,7 +1419,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 21443 + HDFS_BYTES_READ: 21446 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1451,7 +1451,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 23321 + 
HDFS_BYTES_READ: 23332 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1483,7 +1483,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 4322 + HDFS_BYTES_READ: 4326 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 4 HDFS_LARGE_READ_OPS: 0 @@ -1504,9 +1504,9 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 1048576 - ALLOCATED_USED_BYTES: 11434 + ALLOCATED_USED_BYTES: 11436 CACHE_HIT_BYTES: 24 - CACHE_MISS_BYTES: 4322 + CACHE_MISS_BYTES: 4326 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 NUM_VECTOR_BATCHES: 3 @@ -1523,7 +1523,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 1589 + HDFS_BYTES_READ: 1585 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 4 HDFS_LARGE_READ_OPS: 0 @@ -1545,8 +1545,8 @@ Stage-1 HIVE COUNTERS: Stage-1 LLAP IO COUNTERS: ALLOCATED_BYTES: 262144 ALLOCATED_USED_BYTES: 2376 - CACHE_HIT_BYTES: 4346 - CACHE_MISS_BYTES: 1589 + CACHE_HIT_BYTES: 4350 + CACHE_MISS_BYTES: 1585 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 NUM_VECTOR_BATCHES: 3 @@ -1567,7 +1567,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 20860 + HDFS_BYTES_READ: 20864 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1599,7 +1599,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 22586 + HDFS_BYTES_READ: 22592 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1635,7 +1635,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 20860 + HDFS_BYTES_READ: 20864 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1667,7 +1667,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 22586 + HDFS_BYTES_READ: 22592 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1703,7 +1703,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 20860 + HDFS_BYTES_READ: 20864 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1735,7 +1735,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 22586 + HDFS_BYTES_READ: 22592 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1791,7 +1791,7 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_SEL_2: 6 RECORDS_OUT_OPERATOR_TS_0: 2100 Stage-1 LLAP IO COUNTERS: - CACHE_HIT_BYTES: 4346 + CACHE_HIT_BYTES: 4350 CACHE_MISS_BYTES: 0 METADATA_CACHE_HIT: 2 NUM_DECODED_BATCHES: 3 @@ -1851,7 +1851,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 18628 + HDFS_BYTES_READ: 18631 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1883,7 +1883,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 18628 + HDFS_BYTES_READ: 18631 HDFS_BYTES_WRITTEN: 101 
HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1915,7 +1915,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 19952 + HDFS_BYTES_READ: 19950 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 @@ -1947,7 +1947,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@orc_ppd_n3 PREHOOK: Output: hdfs://### HDFS PATH ### Stage-1 FILE SYSTEM COUNTERS: - HDFS_BYTES_READ: 19952 + HDFS_BYTES_READ: 19950 HDFS_BYTES_WRITTEN: 101 HDFS_READ_OPS: 5 HDFS_LARGE_READ_OPS: 0 diff --git ql/src/test/results/clientpositive/llap/order_null.q.out ql/src/test/results/clientpositive/llap/order_null.q.out index 8c37d52f31..c380844749 100644 --- ql/src/test/results/clientpositive/llap/order_null.q.out +++ ql/src/test/results/clientpositive/llap/order_null.q.out @@ -161,9 +161,9 @@ POSTHOOK: Input: default@src_null_n1 1 A 2 A 2 B -NULL NULL 2 NULL 3 NULL +NULL NULL PREHOOK: query: SELECT x.* FROM src_null_n1 x ORDER BY b desc nulls last, a PREHOOK: type: QUERY PREHOOK: Input: default@src_null_n1 @@ -175,9 +175,9 @@ POSTHOOK: Input: default@src_null_n1 2 B 1 A 2 A -NULL NULL 2 NULL 3 NULL +NULL NULL PREHOOK: query: SELECT x.* FROM src_null_n1 x ORDER BY a asc nulls last, b desc PREHOOK: type: QUERY PREHOOK: Input: default@src_null_n1 diff --git ql/src/test/results/clientpositive/llap/partialdhj.q.out ql/src/test/results/clientpositive/llap/partialdhj.q.out index ec0044f42b..4546d85c08 100644 --- ql/src/test/results/clientpositive/llap/partialdhj.q.out +++ ql/src/test/results/clientpositive/llap/partialdhj.q.out @@ -110,6 +110,7 @@ STAGE PLANS: input vertices: 1 Map 4 Statistics: Num rows: 32 Data size: 2848 Basic stats: COMPLETE Column stats: COMPLETE + DynamicPartitionHashJoin: true HybridGraceHashJoin: true Group By Operator keys: _col0 (type: string) @@ -147,6 +148,7 @@ STAGE PLANS: input vertices: 0 Reducer 3 Statistics: Num rows: 26 Data size: 6942 Basic stats: COMPLETE Column stats: COMPLETE + DynamicPartitionHashJoin: true HybridGraceHashJoin: true File Output Operator compressed: false @@ -343,6 +345,7 @@ STAGE PLANS: input vertices: 1 Reducer 5 Statistics: Num rows: 26 Data size: 6942 Basic stats: COMPLETE Column stats: COMPLETE + DynamicPartitionHashJoin: true HybridGraceHashJoin: true File Output Operator compressed: false @@ -364,6 +367,7 @@ STAGE PLANS: input vertices: 1 Map 6 Statistics: Num rows: 32 Data size: 2848 Basic stats: COMPLETE Column stats: COMPLETE + DynamicPartitionHashJoin: true HybridGraceHashJoin: true Group By Operator keys: _col0 (type: string) diff --git ql/src/test/results/clientpositive/llap/ptf.q.out ql/src/test/results/clientpositive/llap/ptf.q.out index 7a067f7797..bf21798568 100644 --- ql/src/test/results/clientpositive/llap/ptf.q.out +++ ql/src/test/results/clientpositive/llap/ptf.q.out @@ -60,7 +60,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -87,7 +87,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -272,7 +272,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int 
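Reviewer note: the order_null.q.out hunks just above are the user-visible side of the new nulls-last default (hive.default.nulls.last=true): an ORDER BY key without an explicit NULLS qualifier now sorts ascending NULLS LAST instead of NULLS FIRST. The second hunk makes this concrete; the query text is verbatim from the golden file, only the commentary is added:

-- Verbatim test query: b carries an explicit qualifier, a does not.
SELECT x.* FROM src_null_n1 x ORDER BY b desc nulls last, a;
-- Old default (a ASC NULLS FIRST):   New default (a ASC NULLS LAST):
--   2    B                             2    B
--   1    A                             1    A
--   2    A                             2    A
--   NULL NULL                          2    NULL
--   2    NULL                          3    NULL
--   3    NULL                          NULL NULL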
partition by: _col2 raw input shape: @@ -299,7 +299,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -426,7 +426,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -551,7 +551,7 @@ STAGE PLANS: Partition table definition input alias: abc name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -578,7 +578,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -731,7 +731,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -758,7 +758,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -914,7 +914,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -950,7 +950,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -1115,7 +1115,7 @@ STAGE PLANS: Partition table definition input alias: abc name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col0: int, _col1: string, _col2: string, _col3: string, _col4: string, _col5: int, _col6: string, _col7: double, _col8: string partition by: _col2 raw input shape: @@ -1292,7 +1292,7 @@ STAGE PLANS: Partition table definition input alias: abc name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col0: int, _col1: string, _col2: string, _col3: string, _col4: string, _col5: int, _col6: string, _col7: double, _col8: string partition by: _col2 raw input shape: @@ -1398,7 +1398,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: p_name ASC NULLS FIRST, p_size DESC NULLS LAST + order by: p_name ASC NULLS LAST, p_size DESC NULLS LAST output shape: p_name: string, p_mfgr: string, p_size: int partition by: p_mfgr raw input shape: @@ -1428,7 +1428,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col1 ASC NULLS FIRST, _col5 DESC NULLS LAST + order by: _col1 ASC NULLS LAST, _col5 DESC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -1455,7 +1455,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST, _col5 DESC NULLS LAST + order by: _col1 ASC NULLS LAST, _col5 DESC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1572,7 +1572,7 @@ 
STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: p_name ASC NULLS FIRST + order by: p_name ASC NULLS LAST output shape: p_name: string, p_mfgr: string, p_size: int, p_retailprice: double partition by: p_mfgr raw input shape: @@ -1603,7 +1603,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1631,7 +1631,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1780,7 +1780,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1807,7 +1807,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1959,7 +1959,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 DESC NULLS LAST, _col1 ASC NULLS FIRST + order by: _col2 DESC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1973,7 +1973,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 DESC NULLS LAST, _col1 ASC NULLS FIRST + order by: _col2 DESC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1981,7 +1981,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 DESC NULLS LAST, _col1 ASC NULLS FIRST + order by: _col2 DESC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -2009,7 +2009,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 DESC NULLS LAST, _col1 ASC NULLS FIRST + order by: _col2 DESC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -2017,7 +2017,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 DESC NULLS LAST, _col1 ASC NULLS FIRST + order by: _col2 DESC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -2044,7 +2044,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -2203,7 +2203,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -2230,7 +2230,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: 
window functions: @@ -2402,7 +2402,7 @@ STAGE PLANS: Partition table definition input alias: abc name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col0: int, _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -2449,7 +2449,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -2613,7 +2613,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -2788,7 +2788,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col0 ASC NULLS FIRST + order by: _col0 ASC NULLS LAST output shape: _col0: string, _col1: string, _col2: double partition by: _col0 raw input shape: @@ -2815,7 +2815,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -3006,7 +3006,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -3039,7 +3039,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -3120,7 +3120,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -3157,7 +3157,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col3 ASC NULLS FIRST, _col2 ASC NULLS FIRST + order by: _col3 ASC NULLS LAST, _col2 ASC NULLS LAST partition by: _col3 raw input shape: window functions: @@ -3464,14 +3464,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -3485,7 +3485,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -3493,7 +3493,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -3521,7 +3521,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC 
NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -3529,7 +3529,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -3739,14 +3739,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -3773,7 +3773,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -3800,7 +3800,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -3827,7 +3827,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -4005,14 +4005,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4039,14 +4039,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -4073,7 +4073,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -4252,14 +4252,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4286,7 +4286,7 @@ STAGE PLANS: 
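Reviewer note: everything in ptf.q.out, ptf_matchpath.q.out and ptf_streaming.q.out is the same mechanical consequence of that default: ORDER BY clauses inside PTF and windowing specs that carried no explicit NULLS qualifier now resolve to ASC NULLS LAST, so the printed partition-table and windowing-table definitions flip from NULLS FIRST to NULLS LAST with no change to the rest of the plan. A representative query shape for these plans, assuming the standard part table and test-only noop() PTF fixtures from ptf.q (the exact projection is illustrative):

-- "partition by: _col2 / order by: _col1 ASC NULLS LAST" in the hunks above
-- corresponds to PARTITION BY p_mfgr ORDER BY p_name with no NULLS qualifier.
SELECT p_mfgr, p_name, p_size,
       rank() OVER (PARTITION BY p_mfgr ORDER BY p_name) AS r
FROM part;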
Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -4300,7 +4300,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4329,7 +4329,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4537,7 +4537,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4551,7 +4551,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -4559,7 +4559,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -4587,7 +4587,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -4595,7 +4595,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -4622,7 +4622,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST partition by: _col2, _col1 raw input shape: window functions: @@ -4796,14 +4796,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4817,7 +4817,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4846,7 +4846,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: 
_col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4874,7 +4874,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/llap/ptf_matchpath.q.out ql/src/test/results/clientpositive/llap/ptf_matchpath.q.out index abb05ff133..a1a34169a6 100644 --- ql/src/test/results/clientpositive/llap/ptf_matchpath.q.out +++ ql/src/test/results/clientpositive/llap/ptf_matchpath.q.out @@ -98,7 +98,7 @@ STAGE PLANS: input alias: ptf_1 arguments: 'LATE.LATE+', 'LATE', (_col5 > 15.0), 'origin_city_name, fl_num, year, month, day_of_month, size(tpath) as sz, tpath[0].day_of_month as tpath' name: matchpath - order by: _col2 ASC NULLS FIRST, _col3 ASC NULLS FIRST, _col4 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col3 ASC NULLS LAST, _col4 ASC NULLS LAST output shape: tpath: int partition by: _col6 raw input shape: @@ -226,7 +226,7 @@ STAGE PLANS: input alias: ptf_1 arguments: 'LATE.LATE+', 'LATE', (_col5 > 15.0), 'origin_city_name, fl_num, year, month, day_of_month, size(tpath) as sz, tpath[0].day_of_month as tpath' name: matchpath - order by: _col6 ASC NULLS FIRST, _col2 ASC NULLS FIRST, _col3 ASC NULLS FIRST, _col4 ASC NULLS FIRST + order by: _col6 ASC NULLS LAST, _col2 ASC NULLS LAST, _col3 ASC NULLS LAST, _col4 ASC NULLS LAST output shape: tpath: int partition by: 0 raw input shape: @@ -352,7 +352,7 @@ STAGE PLANS: input alias: ptf_1 arguments: 'LATE.LATE+', 'LATE', (_col5 > 15.0), 'origin_city_name, fl_num, year, month, day_of_month, size(tpath) as sz, tpath[0].day_of_month as tpath' name: matchpath - order by: _col6 ASC NULLS FIRST, _col2 ASC NULLS FIRST, _col3 ASC NULLS FIRST, _col4 ASC NULLS FIRST + order by: _col6 ASC NULLS LAST, _col2 ASC NULLS LAST, _col3 ASC NULLS LAST, _col4 ASC NULLS LAST output shape: tpath: int partition by: 0 raw input shape: diff --git ql/src/test/results/clientpositive/llap/ptf_streaming.q.out ql/src/test/results/clientpositive/llap/ptf_streaming.q.out index d5369453a6..8f5cf5ea36 100644 --- ql/src/test/results/clientpositive/llap/ptf_streaming.q.out +++ ql/src/test/results/clientpositive/llap/ptf_streaming.q.out @@ -60,7 +60,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopstreaming - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -87,7 +87,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -272,7 +272,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopstreaming - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -299,7 +299,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -445,7 +445,7 @@ STAGE PLANS: Partition table definition input alias: abc name: noopstreaming - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col0: int, _col1: string, _col2: string, _col3: string, _col4: string, _col5: 
int, _col6: string, _col7: double, _col8: string partition by: _col2 raw input shape: @@ -569,7 +569,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmapstreaming - order by: p_name ASC NULLS FIRST, p_size DESC NULLS LAST + order by: p_name ASC NULLS LAST, p_size DESC NULLS LAST output shape: p_name: string, p_mfgr: string, p_size: int partition by: p_mfgr raw input shape: @@ -599,7 +599,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmapstreaming - order by: _col1 ASC NULLS FIRST, _col5 DESC NULLS LAST + order by: _col1 ASC NULLS LAST, _col5 DESC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -626,7 +626,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST, _col5 DESC NULLS LAST + order by: _col1 ASC NULLS LAST, _col5 DESC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -743,7 +743,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmapstreaming - order by: p_name ASC NULLS FIRST + order by: p_name ASC NULLS LAST output shape: p_name: string, p_mfgr: string, p_size: int, p_retailprice: double partition by: p_mfgr raw input shape: @@ -774,7 +774,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmapstreaming - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -802,7 +802,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -954,7 +954,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -968,7 +968,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmapstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -976,7 +976,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1004,7 +1004,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmapstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1012,7 +1012,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1039,7 +1039,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + 
order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1193,7 +1193,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1207,7 +1207,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1215,7 +1215,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1243,7 +1243,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1251,7 +1251,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1278,7 +1278,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1432,7 +1432,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1446,7 +1446,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmapstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1454,7 +1454,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1482,7 +1482,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmapstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1490,7 +1490,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1517,7 +1517,7 @@ STAGE PLANS: Windowing 
table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1691,7 +1691,7 @@ STAGE PLANS: Partition table definition input alias: abc name: noopstreaming - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col0: int, _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1738,7 +1738,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1925,14 +1925,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopstreaming - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -1946,7 +1946,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -1954,7 +1954,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -1982,7 +1982,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -1990,7 +1990,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -2200,14 +2200,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopstreaming - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -2234,7 +2234,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -2261,7 +2261,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -2288,7 +2288,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: 
windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -2464,14 +2464,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -2485,7 +2485,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmapstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -2514,7 +2514,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmapstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -2542,7 +2542,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/llap/reopt_dpp.q.out ql/src/test/results/clientpositive/llap/reopt_dpp.q.out index 31726f64d4..e248575de1 100644 --- ql/src/test/results/clientpositive/llap/reopt_dpp.q.out +++ ql/src/test/results/clientpositive/llap/reopt_dpp.q.out @@ -244,7 +244,7 @@ Stage-0 Output:["_col0"] Filter Operator [FIL_24] (runtime: rows=1 width=8) predicate:((d_year = 2000) and d_date_sk is not null) - TableScan [TS_3] (runtime: rows=2 width=8) + TableScan [TS_3] (runtime: rows=1 width=8) default@x1_date_dim,d,Tbl:COMPLETE,Col:COMPLETE,Output:["d_date_sk","d_year"] Dynamic Partitioning Event Operator [EVENT_29] (runtime: rows=1 width=8) Group By Operator [GBY_28] (runtime: rows=1 width=8) diff --git ql/src/test/results/clientpositive/llap/reopt_semijoin.q.out ql/src/test/results/clientpositive/llap/reopt_semijoin.q.out index 7518ae638d..37143cbf96 100644 --- ql/src/test/results/clientpositive/llap/reopt_semijoin.q.out +++ ql/src/test/results/clientpositive/llap/reopt_semijoin.q.out @@ -336,7 +336,7 @@ STAGE PLANS: TableScan alias: d filterExpr: ((d_moy = 3) and d_date_sk is not null) (type: boolean) - Statistics: (RUNTIME) Num rows: 8 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: (RUNTIME) Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ((d_moy = 3) and d_date_sk is not null) (type: boolean) Statistics: (RUNTIME) Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE diff --git ql/src/test/results/clientpositive/llap/results_cache_2.q.out ql/src/test/results/clientpositive/llap/results_cache_2.q.out index 6f8d52f630..b17c58df64 100644 --- ql/src/test/results/clientpositive/llap/results_cache_2.q.out +++ ql/src/test/results/clientpositive/llap/results_cache_2.q.out @@ -221,14 +221,14 @@ STAGE PLANS: TableScan alias: src filterExpr: (UDFToDouble(key) < 10.0D) (type: boolean) - Statistics: Num rows: 500/500 Data 
size: 89000 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 500/1 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (UDFToDouble(key) < 10.0D) (type: boolean) - Statistics: Num rows: 166/10 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 166/1 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: sign(value) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 166/10 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 166/1 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: count() keys: _col0 (type: double) diff --git ql/src/test/results/clientpositive/llap/runtime_stats_hs2.q.out ql/src/test/results/clientpositive/llap/runtime_stats_hs2.q.out index 76cee83f0b..a94fb41fca 100644 --- ql/src/test/results/clientpositive/llap/runtime_stats_hs2.q.out +++ ql/src/test/results/clientpositive/llap/runtime_stats_hs2.q.out @@ -123,19 +123,19 @@ Stage-0 <-Map 1 [SIMPLE_EDGE] vectorized, llap SHUFFLE [RS_33] PartitionCols:_col0 - Select Operator [SEL_32] (runtime: rows=5 width=4) + Select Operator [SEL_32] (runtime: rows=1 width=4) Output:["_col0"] - Filter Operator [FIL_31] (runtime: rows=5 width=4) + Filter Operator [FIL_31] (runtime: rows=1 width=4) predicate:((u < 10) and (u > 2)) - TableScan [TS_0] (runtime: rows=8 width=4) + TableScan [TS_0] (runtime: rows=1 width=4) default@tx_n3,tx_n3,Tbl:COMPLETE,Col:COMPLETE,Output:["u"] <-Map 4 [SIMPLE_EDGE] vectorized, llap SHUFFLE [RS_36] PartitionCols:_col0 - Select Operator [SEL_35] (runtime: rows=3 width=4) + Select Operator [SEL_35] (runtime: rows=1 width=4) Output:["_col0"] - Filter Operator [FIL_34] (runtime: rows=3 width=4) + Filter Operator [FIL_34] (runtime: rows=1 width=4) predicate:((p < 10) and (p > 2)) - TableScan [TS_3] (runtime: rows=5 width=4) + TableScan [TS_3] (runtime: rows=1 width=4) default@px_n0,px_n0,Tbl:COMPLETE,Col:COMPLETE,Output:["p"] diff --git ql/src/test/results/clientpositive/llap/schema_evol_undecorated.q.out ql/src/test/results/clientpositive/llap/schema_evol_undecorated.q.out new file mode 100644 index 0000000000..2cbdb4cb65 --- /dev/null +++ ql/src/test/results/clientpositive/llap/schema_evol_undecorated.q.out @@ -0,0 +1,64 @@ +PREHOOK: query: create external table new_char_decimal (c1 char(20)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@new_char_decimal +POSTHOOK: query: create external table new_char_decimal (c1 char(20)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@new_char_decimal +PREHOOK: query: alter table new_char_decimal change c1 c1 decimal(31,0) +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@new_char_decimal +PREHOOK: Output: default@new_char_decimal +POSTHOOK: query: alter table new_char_decimal change c1 c1 decimal(31,0) +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: default@new_char_decimal +POSTHOOK: Output: default@new_char_decimal +PREHOOK: query: create external table new_varchar_decimal (c1 varchar(25)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@new_varchar_decimal +POSTHOOK: query: create external table new_varchar_decimal (c1 varchar(25)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@new_varchar_decimal +PREHOOK: query: alter table new_varchar_decimal change c1 c1 
decimal(12,5) +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@new_varchar_decimal +PREHOOK: Output: default@new_varchar_decimal +POSTHOOK: query: alter table new_varchar_decimal change c1 c1 decimal(12,5) +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: default@new_varchar_decimal +POSTHOOK: Output: default@new_varchar_decimal +PREHOOK: query: create external table new_char_double (c1 char(20)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@new_char_double +POSTHOOK: query: create external table new_char_double (c1 char(20)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@new_char_double +PREHOOK: query: alter table new_char_double change c1 c1 double +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@new_char_double +PREHOOK: Output: default@new_char_double +POSTHOOK: query: alter table new_char_double change c1 c1 double +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: default@new_char_double +POSTHOOK: Output: default@new_char_double +PREHOOK: query: create external table new_varchar_double (c1 varchar(25)) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@new_varchar_double +POSTHOOK: query: create external table new_varchar_double (c1 varchar(25)) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@new_varchar_double +PREHOOK: query: alter table new_varchar_double change c1 c1 double +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: default@new_varchar_double +PREHOOK: Output: default@new_varchar_double +POSTHOOK: query: alter table new_varchar_double change c1 c1 double +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: default@new_varchar_double +POSTHOOK: Output: default@new_varchar_double diff --git ql/src/test/results/clientpositive/llap/semijoin.q.out ql/src/test/results/clientpositive/llap/semijoin.q.out index f34373223e..3ac562ceba 100644 --- ql/src/test/results/clientpositive/llap/semijoin.q.out +++ ql/src/test/results/clientpositive/llap/semijoin.q.out @@ -1996,7 +1996,7 @@ STAGE PLANS: Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 Left Semi Join 1 to 2 keys: 0 key (type: int) @@ -2422,7 +2422,7 @@ STAGE PLANS: Merge Join Operator condition map: Left Semi Join 0 to 1 - Outer Join 0 to 2 + Full Outer Join 0 to 2 keys: 0 key (type: int) 1 _col0 (type: int) diff --git ql/src/test/results/clientpositive/llap/sharedworkext.q.out ql/src/test/results/clientpositive/llap/sharedworkext.q.out index 531f073a92..5d8dcfcb4a 100644 --- ql/src/test/results/clientpositive/llap/sharedworkext.q.out +++ ql/src/test/results/clientpositive/llap/sharedworkext.q.out @@ -648,7 +648,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: 0 raw input shape: window functions: @@ -706,7 +706,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: 0 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/llap/skewjoinopt15.q.out ql/src/test/results/clientpositive/llap/skewjoinopt15.q.out index cd20c3ab17..26e4355827 100644 --- ql/src/test/results/clientpositive/llap/skewjoinopt15.q.out +++ ql/src/test/results/clientpositive/llap/skewjoinopt15.q.out @@ -258,14 +258,14 @@ POSTHOOK: 
type: QUERY POSTHOOK: Input: default@t1_n109 POSTHOOK: Input: default@t2_n66 #### A masked pattern was here #### -NULL NULL 4 14 -NULL NULL 5 15 2 12 2 22 3 13 3 13 8 18 8 18 8 18 8 18 8 28 8 18 8 28 8 18 +NULL NULL 4 14 +NULL NULL 5 15 PREHOOK: query: EXPLAIN SELECT count(1) FROM T1_n109 a JOIN T2_n66 b ON a.key = b.key PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/llap/smb_mapjoin_15.q.out ql/src/test/results/clientpositive/llap/smb_mapjoin_15.q.out index 10879024e7..e4f24521d9 100644 --- ql/src/test/results/clientpositive/llap/smb_mapjoin_15.q.out +++ ql/src/test/results/clientpositive/llap/smb_mapjoin_15.q.out @@ -155,7 +155,7 @@ STAGE PLANS: Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE tag: -1 @@ -541,7 +541,7 @@ STAGE PLANS: Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE tag: -1 @@ -818,7 +818,7 @@ STAGE PLANS: Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE tag: -1 @@ -1095,7 +1095,7 @@ STAGE PLANS: Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: COMPLETE tag: -1 diff --git ql/src/test/results/clientpositive/llap/smb_mapjoin_4.q.out ql/src/test/results/clientpositive/llap/smb_mapjoin_4.q.out index bbaf94b515..0c1ad420ae 100644 --- ql/src/test/results/clientpositive/llap/smb_mapjoin_4.q.out +++ ql/src/test/results/clientpositive/llap/smb_mapjoin_4.q.out @@ -464,7 +464,7 @@ STAGE PLANS: Merge Join Operator condition map: Left Outer Join 0 to 1 - Outer Join 1 to 2 + Full Outer Join 1 to 2 keys: 0 _col0 (type: int) 1 _col0 (type: int) @@ -838,7 +838,7 @@ STAGE PLANS: Merge Join Operator condition map: Right Outer Join 0 to 1 - Outer Join 1 to 2 + Full Outer Join 1 to 2 keys: 0 _col0 (type: int) 1 _col0 (type: int) @@ -1044,7 +1044,7 @@ STAGE PLANS: Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 Left Outer Join 1 to 2 keys: 0 _col0 (type: int) @@ -1161,7 +1161,7 @@ STAGE PLANS: Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 Right Outer Join 1 to 2 keys: 0 _col0 (type: int) @@ -1275,8 +1275,8 @@ STAGE PLANS: Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 - Outer Join 1 to 2 + Full Outer Join 0 to 1 + Full Outer Join 1 to 2 keys: 0 _col0 (type: int) 1 _col0 (type: int) diff --git ql/src/test/results/clientpositive/llap/smb_mapjoin_5.q.out ql/src/test/results/clientpositive/llap/smb_mapjoin_5.q.out index 6d273cdd29..6a63463df5 100644 --- 
ql/src/test/results/clientpositive/llap/smb_mapjoin_5.q.out +++ ql/src/test/results/clientpositive/llap/smb_mapjoin_5.q.out @@ -464,7 +464,7 @@ STAGE PLANS: Merge Join Operator condition map: Left Outer Join 0 to 1 - Outer Join 1 to 2 + Full Outer Join 1 to 2 keys: 0 _col0 (type: int) 1 _col0 (type: int) @@ -838,7 +838,7 @@ STAGE PLANS: Merge Join Operator condition map: Right Outer Join 0 to 1 - Outer Join 1 to 2 + Full Outer Join 1 to 2 keys: 0 _col0 (type: int) 1 _col0 (type: int) @@ -1044,7 +1044,7 @@ STAGE PLANS: Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 Left Outer Join 1 to 2 keys: 0 _col0 (type: int) @@ -1161,7 +1161,7 @@ STAGE PLANS: Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 Right Outer Join 1 to 2 keys: 0 _col0 (type: int) @@ -1275,8 +1275,8 @@ STAGE PLANS: Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 - Outer Join 1 to 2 + Full Outer Join 0 to 1 + Full Outer Join 1 to 2 keys: 0 _col0 (type: int) 1 _col0 (type: int) diff --git ql/src/test/results/clientpositive/llap/subquery_in.q.out ql/src/test/results/clientpositive/llap/subquery_in.q.out index 21801a338a..5a9021badf 100644 --- ql/src/test/results/clientpositive/llap/subquery_in.q.out +++ ql/src/test/results/clientpositive/llap/subquery_in.q.out @@ -345,7 +345,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -528,7 +528,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/llap/subquery_in_having.q.out ql/src/test/results/clientpositive/llap/subquery_in_having.q.out index ab28bbafb0..1b25b5c56f 100644 --- ql/src/test/results/clientpositive/llap/subquery_in_having.q.out +++ ql/src/test/results/clientpositive/llap/subquery_in_having.q.out @@ -1508,7 +1508,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/llap/subquery_notin.q.out ql/src/test/results/clientpositive/llap/subquery_notin.q.out index 15db5c099d..31c2abae81 100644 --- ql/src/test/results/clientpositive/llap/subquery_notin.q.out +++ ql/src/test/results/clientpositive/llap/subquery_notin.q.out @@ -421,7 +421,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -483,7 +483,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -696,7 +696,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -733,7 +733,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST 
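Reviewer note: two related relabelings run through the semijoin, skewjoinopt15 and smb_mapjoin_* hunks above. First, merge-join condition maps now print "Full Outer Join 0 to 1" where they previously printed the ambiguous "Outer Join 0 to 1", bookkeeping for the new FULL OUTER MapJoin path; the join semantics are unchanged. Second, ReduceSink operators print "null sort order: z" instead of "a", meaning shuffle keys now order NULLs last, consistent with the new default. A representative three-way shape behind these plans, with t1, t2, t3 standing in for the test tables:

-- Plans for this pattern now label the second condition explicitly as
-- "Full Outer Join 1 to 2" in the Merge Join Operator's condition map.
SELECT *
FROM t1 a
LEFT OUTER JOIN t2 b ON a.key = b.key
FULL OUTER JOIN t3 c ON b.key = c.key;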
partition by: _col2 raw input shape: window functions: @@ -1004,7 +1004,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1042,7 +1042,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1082,7 +1082,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/llap/subquery_scalar.q.out ql/src/test/results/clientpositive/llap/subquery_scalar.q.out index 60ddd72d5d..7e7f4fe3c1 100644 --- ql/src/test/results/clientpositive/llap/subquery_scalar.q.out +++ ql/src/test/results/clientpositive/llap/subquery_scalar.q.out @@ -1039,7 +1039,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1067,7 +1067,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_1.q.out ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_1.q.out index d204b47499..2bc1d3e740 100644 --- ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_1.q.out +++ ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_1.q.out @@ -410,11 +410,11 @@ order by a.csmallint POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL 6 -13036 1 -8915 1 -3799 1 10782 1 +NULL 6 PREHOOK: query: explain select * from alltypesorc a left outer join alltypesorc b on a.cint = b.cint and a.csmallint != a.cint @@ -500,6 +500,7 @@ STAGE PLANS: input vertices: 1 Map 4 Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true HybridGraceHashJoin: true Reduce Output Operator key expressions: _col2 (type: int) @@ -639,6 +640,7 @@ STAGE PLANS: input vertices: 1 Map 4 Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true HybridGraceHashJoin: true Reduce Output Operator key expressions: _col2 (type: int) @@ -773,6 +775,7 @@ STAGE PLANS: input vertices: 1 Map 4 Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true HybridGraceHashJoin: true Group By Operator aggregations: count() @@ -908,6 +911,7 @@ STAGE PLANS: input vertices: 1 Map 5 Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true HybridGraceHashJoin: true Group By Operator aggregations: count() @@ -976,8 +980,8 @@ order by cs POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL 6 -13036 1 -8915 1 -3799 1 10782 1 +NULL 6 diff --git ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_2.q.out ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_2.q.out index 
e44639d0d2..3f74be1eed 100644 --- ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_2.q.out +++ ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_2.q.out @@ -166,10 +166,10 @@ POSTHOOK: Input: default@src -19 8 626923679 NULL -19.0 8.0 821UdmGbkEf4j NULL 1969-12-31 15:59:46.619 1969-12-31 15:59:46.95 true NULL 6 8 528534767 NULL 6.0 8.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:45.459 1969-12-31 16:00:00.236 true NULL NULL 9 -470743566 -1887561756 NULL 9.0 swx5K33Sm5qcKR5B 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:07.318 true false -NULL 10 813877020 -1645852809 NULL 10.0 4QG23O2GKF6BUe13O7A2C xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.851 false false -62 10 528534767 NULL -62.0 10.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.265 1969-12-31 15:59:56.584 true NULL -NULL 19 312515097 1864027286 NULL 19.0 ds5YqbRvhf3Sb2 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:56.211 false true +NULL 10 813877020 -1645852809 NULL 10.0 4QG23O2GKF6BUe13O7A2C xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.851 false false -7 19 528534767 NULL -7.0 19.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:13.994 1969-12-31 15:59:55.362 true NULL +NULL 19 312515097 1864027286 NULL 19.0 ds5YqbRvhf3Sb2 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:56.211 false true -45 20 253665376 NULL -45.0 20.0 1cGVWH7n1QU NULL 1969-12-31 16:00:09.949 1969-12-31 16:00:10.979 true NULL NULL 34 510824788 -1887561756 NULL 34.0 nj1bXoh6k 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:46.017 true false 41 37 528534767 NULL 41.0 37.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.817 1969-12-31 15:59:53.672 true NULL @@ -360,10 +360,10 @@ POSTHOOK: Input: default@src -19 8 626923679 NULL -19.0 8.0 821UdmGbkEf4j NULL 1969-12-31 15:59:46.619 1969-12-31 15:59:46.95 true NULL 6 8 528534767 NULL 6.0 8.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:45.459 1969-12-31 16:00:00.236 true NULL NULL 9 -470743566 -1887561756 NULL 9.0 swx5K33Sm5qcKR5B 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:07.318 true false -NULL 10 813877020 -1645852809 NULL 10.0 4QG23O2GKF6BUe13O7A2C xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.851 false false -62 10 528534767 NULL -62.0 10.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.265 1969-12-31 15:59:56.584 true NULL -NULL 19 312515097 1864027286 NULL 19.0 ds5YqbRvhf3Sb2 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:56.211 false true +NULL 10 813877020 -1645852809 NULL 10.0 4QG23O2GKF6BUe13O7A2C xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.851 false false -7 19 528534767 NULL -7.0 19.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:13.994 1969-12-31 15:59:55.362 true NULL +NULL 19 312515097 1864027286 NULL 19.0 ds5YqbRvhf3Sb2 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:56.211 false true -45 20 253665376 NULL -45.0 20.0 1cGVWH7n1QU NULL 1969-12-31 16:00:09.949 1969-12-31 16:00:10.979 true NULL NULL 34 510824788 -1887561756 NULL 34.0 nj1bXoh6k 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:46.017 true false 41 37 528534767 NULL 41.0 37.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.817 1969-12-31 15:59:53.672 true NULL @@ -554,10 +554,10 @@ POSTHOOK: Input: default@src -19 8 626923679 NULL -19.0 8.0 821UdmGbkEf4j NULL 1969-12-31 15:59:46.619 1969-12-31 15:59:46.95 true NULL 6 8 528534767 NULL 6.0 8.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:45.459 1969-12-31 16:00:00.236 true NULL NULL 9 -470743566 -1887561756 NULL 9.0 swx5K33Sm5qcKR5B 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:07.318 true false -NULL 10 813877020 -1645852809 NULL 10.0 4QG23O2GKF6BUe13O7A2C xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.851 false false -62 10 528534767 NULL -62.0 10.0 
cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.265 1969-12-31 15:59:56.584 true NULL -NULL 19 312515097 1864027286 NULL 19.0 ds5YqbRvhf3Sb2 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:56.211 false true +NULL 10 813877020 -1645852809 NULL 10.0 4QG23O2GKF6BUe13O7A2C xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.851 false false -7 19 528534767 NULL -7.0 19.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:13.994 1969-12-31 15:59:55.362 true NULL +NULL 19 312515097 1864027286 NULL 19.0 ds5YqbRvhf3Sb2 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:56.211 false true -45 20 253665376 NULL -45.0 20.0 1cGVWH7n1QU NULL 1969-12-31 16:00:09.949 1969-12-31 16:00:10.979 true NULL NULL 34 510824788 -1887561756 NULL 34.0 nj1bXoh6k 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:46.017 true false 41 37 528534767 NULL 41.0 37.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.817 1969-12-31 15:59:53.672 true NULL diff --git ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_3.q.out ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_3.q.out index 990e3572f2..07f13bc270 100644 --- ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_3.q.out +++ ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_3.q.out @@ -197,6 +197,115 @@ STAGE PLANS: input vertices: 1 Map 4 Statistics: Num rows: 1 Data size: 310 Basic stats: COMPLETE Column stats: COMPLETE + DynamicPartitionHashJoin: true + HybridGraceHashJoin: true + Limit + Number of rows: 1 + Statistics: Num rows: 1 Data size: 310 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 310 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 1 + Processor Tree: + ListSink + +PREHOOK: query: explain +select a.* +from alltypesorc a left outer join src b +on a.cint = cast(b.key as int) +limit 1 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select a.* +from alltypesorc a left outer join src b +on a.cint = cast(b.key as int) +limit 1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE) + Reducer 3 <- Map 4 (CUSTOM_SIMPLE_EDGE), Reducer 2 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 12288 Data size: 3093170 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11 + Statistics: Num rows: 12288 Data size: 3093170 Basic stats: COMPLETE Column stats: COMPLETE + Limit + Number of rows: 1 + Statistics: Num rows: 1 Data size: 310 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 310 Basic stats: COMPLETE Column stats: COMPLETE + TopN Hash Memory Usage: 0.1 + value expressions: _col0 (type: 
tinyint), _col1 (type: smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map 4 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: UDFToInteger(_col0) (type: int) + sort order: + + Map-reduce partition columns: UDFToInteger(_col0) (type: int) + Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized, llap + LLAP IO: no inputs + Reducer 2 + Execution mode: vectorized, llap + Reduce Operator Tree: + Select Operator + expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: int), VALUE._col3 (type: bigint), VALUE._col4 (type: float), VALUE._col5 (type: double), VALUE._col6 (type: string), VALUE._col7 (type: string), VALUE._col8 (type: timestamp), VALUE._col9 (type: timestamp), VALUE._col10 (type: boolean), VALUE._col11 (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11 + Statistics: Num rows: 1 Data size: 310 Basic stats: COMPLETE Column stats: COMPLETE + Limit + Number of rows: 1 + Statistics: Num rows: 1 Data size: 310 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col2 (type: int) + sort order: + + Map-reduce partition columns: _col2 (type: int) + Statistics: Num rows: 1 Data size: 310 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean) + Reducer 3 + Execution mode: vectorized, llap + Reduce Operator Tree: + Map Join Operator + condition map: + Left Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: int) + 1 KEY.reducesinkkey0 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11 + input vertices: + 1 Map 4 + Statistics: Num rows: 1 Data size: 310 Basic stats: COMPLETE Column stats: COMPLETE + DynamicPartitionHashJoin: true HybridGraceHashJoin: true Limit Number of rows: 1 diff --git ql/src/test/results/clientpositive/llap/tez_fixed_bucket_pruning.q.out ql/src/test/results/clientpositive/llap/tez_fixed_bucket_pruning.q.out index 98b20132a6..0aed74e892 100644 --- ql/src/test/results/clientpositive/llap/tez_fixed_bucket_pruning.q.out +++ ql/src/test/results/clientpositive/llap/tez_fixed_bucket_pruning.q.out @@ -543,7 +543,7 @@ STAGE PLANS: Statistics: Num rows: 15 Data size: 240 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: bigint), _col1 (type: bigint) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 15 Data size: 240 Basic stats: COMPLETE Column stats: COMPLETE tag: -1 @@ -918,11 +918,11 @@ POSTHOOK: Input: default@l3_clarity__l3_monthly_dw_factplan_dw_stg_2018022300104 POSTHOOK: Input: default@l3_clarity__l3_snap_number_2018022300104 
POSTHOOK: Input: default@l3_monthly_dw_dimplan #### A masked pattern was here #### -7147200 NULL 27114 -7147200 NULL 27114 -7147200 NULL 27114 -7147200 NULL 27114 -7147200 NULL 27114 +7147200 189561 27114 +7147200 191205 27114 +7147200 195775 27114 +7147200 234349 27114 +7147200 350519 27114 Warning: Map Join MAPJOIN[48][bigTable=?] in task 'Map 1' is a cross product PREHOOK: query: EXPLAIN EXTENDED SELECT DW.PROJECT_OBJECT_ID, S1.PLAN_KEY as PLAN_KEY, S2.PROJECT_KEY AS PROJECT_KEY @@ -1042,7 +1042,7 @@ STAGE PLANS: Statistics: Num rows: 15 Data size: 240 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: bigint), _col1 (type: bigint) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 15 Data size: 240 Basic stats: COMPLETE Column stats: COMPLETE tag: -1 @@ -1418,8 +1418,8 @@ POSTHOOK: Input: default@l3_clarity__l3_monthly_dw_factplan_dw_stg_2018022300104 POSTHOOK: Input: default@l3_clarity__l3_snap_number_2018022300104 POSTHOOK: Input: default@l3_monthly_dw_dimplan #### A masked pattern was here #### -7147200 NULL 27114 -7147200 NULL 27114 -7147200 NULL 27114 -7147200 NULL 27114 -7147200 NULL 27114 +7147200 189561 27114 +7147200 191205 27114 +7147200 195775 27114 +7147200 234349 27114 +7147200 350519 27114 diff --git ql/src/test/results/clientpositive/llap/tez_input_counters.q.out ql/src/test/results/clientpositive/llap/tez_input_counters.q.out index f24906f1b2..d346f04278 100644 --- ql/src/test/results/clientpositive/llap/tez_input_counters.q.out +++ ql/src/test/results/clientpositive/llap/tez_input_counters.q.out @@ -1581,8 +1581,8 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_GBY_9: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_8: 1000 - RECORDS_OUT_OPERATOR_SEL_7: 1000 - RECORDS_OUT_OPERATOR_TS_0: 1000 + RECORDS_OUT_OPERATOR_SEL_7: 618 + RECORDS_OUT_OPERATOR_TS_0: 618 Stage-1 LLAP IO COUNTERS: CACHE_MISS_BYTES: 3812 NUM_DECODED_BATCHES: 618 @@ -1612,8 +1612,8 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 8 - RECORDS_OUT_OPERATOR_SEL_9: 8 - RECORDS_OUT_OPERATOR_TS_0: 8 + RECORDS_OUT_OPERATOR_SEL_9: 4 + RECORDS_OUT_OPERATOR_TS_0: 4 Stage-1 LLAP IO COUNTERS: CACHE_HIT_BYTES: 18 NUM_DECODED_BATCHES: 4 @@ -1715,8 +1715,8 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_GBY_11: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_10: 240 - RECORDS_OUT_OPERATOR_SEL_9: 240 - RECORDS_OUT_OPERATOR_TS_0: 240 + RECORDS_OUT_OPERATOR_SEL_9: 148 + RECORDS_OUT_OPERATOR_TS_0: 148 Stage-1 LLAP IO COUNTERS: CACHE_HIT_BYTES: 922 NUM_DECODED_BATCHES: 148 @@ -1825,10 +1825,10 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_OPERATOR_GBY_7: 74 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_RS_14: 240 - RECORDS_OUT_OPERATOR_SEL_11: 240 - RECORDS_OUT_OPERATOR_SEL_13: 240 + RECORDS_OUT_OPERATOR_SEL_11: 148 + RECORDS_OUT_OPERATOR_SEL_13: 148 RECORDS_OUT_OPERATOR_SEL_8: 74 - RECORDS_OUT_OPERATOR_TS_0: 240 + RECORDS_OUT_OPERATOR_TS_0: 148 TOTAL_TABLE_ROWS_WRITTEN: 240 Stage-1 LLAP IO COUNTERS: CACHE_HIT_BYTES: 922 @@ -2346,21 +2346,21 @@ Stage-1 HIVE COUNTERS: RECORDS_OUT_INTERMEDIATE_Map_4: 240 RECORDS_OUT_INTERMEDIATE_Reducer_2: 952 RECORDS_OUT_INTERMEDIATE_Reducer_3: 0 - RECORDS_OUT_OPERATOR_EVENT_30: 309 + RECORDS_OUT_OPERATOR_EVENT_30: 1 RECORDS_OUT_OPERATOR_FS_34: 1 - RECORDS_OUT_OPERATOR_GBY_29: 309 + RECORDS_OUT_OPERATOR_GBY_29: 1 RECORDS_OUT_OPERATOR_GBY_33: 1 RECORDS_OUT_OPERATOR_MAP_0: 0 RECORDS_OUT_OPERATOR_MERGEJOIN_25: 952 RECORDS_OUT_OPERATOR_RS_11: 
952 RECORDS_OUT_OPERATOR_RS_27: 1000 RECORDS_OUT_OPERATOR_RS_32: 240 - RECORDS_OUT_OPERATOR_SEL_26: 1000 - RECORDS_OUT_OPERATOR_SEL_28: 1000 - RECORDS_OUT_OPERATOR_SEL_31: 240 + RECORDS_OUT_OPERATOR_SEL_26: 618 + RECORDS_OUT_OPERATOR_SEL_28: 618 + RECORDS_OUT_OPERATOR_SEL_31: 74 RECORDS_OUT_OPERATOR_SEL_9: 952 - RECORDS_OUT_OPERATOR_TS_0: 1000 - RECORDS_OUT_OPERATOR_TS_3: 240 + RECORDS_OUT_OPERATOR_TS_0: 618 + RECORDS_OUT_OPERATOR_TS_3: 74 Stage-1 LLAP IO COUNTERS: CACHE_HIT_BYTES: 3812 CACHE_MISS_BYTES: 922 diff --git ql/src/test/results/clientpositive/llap/tez_vector_dynpart_hashjoin_1.q.out ql/src/test/results/clientpositive/llap/tez_vector_dynpart_hashjoin_1.q.out index dcc7e9d835..db483d1705 100644 --- ql/src/test/results/clientpositive/llap/tez_vector_dynpart_hashjoin_1.q.out +++ ql/src/test/results/clientpositive/llap/tez_vector_dynpart_hashjoin_1.q.out @@ -410,11 +410,11 @@ order by a.csmallint POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL 6 -13036 1 -8915 1 -3799 1 10782 1 +NULL 6 PREHOOK: query: explain select * @@ -499,6 +499,7 @@ STAGE PLANS: input vertices: 1 Map 4 Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true HybridGraceHashJoin: true Reduce Output Operator key expressions: _col2 (type: int) @@ -633,6 +634,7 @@ STAGE PLANS: input vertices: 1 Map 4 Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true HybridGraceHashJoin: true Group By Operator aggregations: count() @@ -768,6 +770,7 @@ STAGE PLANS: input vertices: 1 Map 5 Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true HybridGraceHashJoin: true Group By Operator aggregations: count() @@ -836,8 +839,8 @@ order by a.csmallint POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL 6 -13036 1 -8915 1 -3799 1 10782 1 +NULL 6 diff --git ql/src/test/results/clientpositive/llap/tez_vector_dynpart_hashjoin_2.q.out ql/src/test/results/clientpositive/llap/tez_vector_dynpart_hashjoin_2.q.out index e44639d0d2..3f74be1eed 100644 --- ql/src/test/results/clientpositive/llap/tez_vector_dynpart_hashjoin_2.q.out +++ ql/src/test/results/clientpositive/llap/tez_vector_dynpart_hashjoin_2.q.out @@ -166,10 +166,10 @@ POSTHOOK: Input: default@src -19 8 626923679 NULL -19.0 8.0 821UdmGbkEf4j NULL 1969-12-31 15:59:46.619 1969-12-31 15:59:46.95 true NULL 6 8 528534767 NULL 6.0 8.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:45.459 1969-12-31 16:00:00.236 true NULL NULL 9 -470743566 -1887561756 NULL 9.0 swx5K33Sm5qcKR5B 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:07.318 true false -NULL 10 813877020 -1645852809 NULL 10.0 4QG23O2GKF6BUe13O7A2C xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.851 false false -62 10 528534767 NULL -62.0 10.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.265 1969-12-31 15:59:56.584 true NULL -NULL 19 312515097 1864027286 NULL 19.0 ds5YqbRvhf3Sb2 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:56.211 false true +NULL 10 813877020 -1645852809 NULL 10.0 4QG23O2GKF6BUe13O7A2C xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.851 false false -7 19 528534767 NULL -7.0 19.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:13.994 1969-12-31 15:59:55.362 true NULL +NULL 19 312515097 1864027286 NULL 19.0 ds5YqbRvhf3Sb2 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:56.211 false true -45 20 253665376 NULL -45.0 20.0 1cGVWH7n1QU 
NULL 1969-12-31 16:00:09.949 1969-12-31 16:00:10.979 true NULL NULL 34 510824788 -1887561756 NULL 34.0 nj1bXoh6k 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:46.017 true false 41 37 528534767 NULL 41.0 37.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.817 1969-12-31 15:59:53.672 true NULL @@ -360,10 +360,10 @@ POSTHOOK: Input: default@src -19 8 626923679 NULL -19.0 8.0 821UdmGbkEf4j NULL 1969-12-31 15:59:46.619 1969-12-31 15:59:46.95 true NULL 6 8 528534767 NULL 6.0 8.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:45.459 1969-12-31 16:00:00.236 true NULL NULL 9 -470743566 -1887561756 NULL 9.0 swx5K33Sm5qcKR5B 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:07.318 true false -NULL 10 813877020 -1645852809 NULL 10.0 4QG23O2GKF6BUe13O7A2C xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.851 false false -62 10 528534767 NULL -62.0 10.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.265 1969-12-31 15:59:56.584 true NULL -NULL 19 312515097 1864027286 NULL 19.0 ds5YqbRvhf3Sb2 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:56.211 false true +NULL 10 813877020 -1645852809 NULL 10.0 4QG23O2GKF6BUe13O7A2C xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.851 false false -7 19 528534767 NULL -7.0 19.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:13.994 1969-12-31 15:59:55.362 true NULL +NULL 19 312515097 1864027286 NULL 19.0 ds5YqbRvhf3Sb2 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:56.211 false true -45 20 253665376 NULL -45.0 20.0 1cGVWH7n1QU NULL 1969-12-31 16:00:09.949 1969-12-31 16:00:10.979 true NULL NULL 34 510824788 -1887561756 NULL 34.0 nj1bXoh6k 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:46.017 true false 41 37 528534767 NULL 41.0 37.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.817 1969-12-31 15:59:53.672 true NULL @@ -554,10 +554,10 @@ POSTHOOK: Input: default@src -19 8 626923679 NULL -19.0 8.0 821UdmGbkEf4j NULL 1969-12-31 15:59:46.619 1969-12-31 15:59:46.95 true NULL 6 8 528534767 NULL 6.0 8.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:45.459 1969-12-31 16:00:00.236 true NULL NULL 9 -470743566 -1887561756 NULL 9.0 swx5K33Sm5qcKR5B 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:07.318 true false -NULL 10 813877020 -1645852809 NULL 10.0 4QG23O2GKF6BUe13O7A2C xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.851 false false -62 10 528534767 NULL -62.0 10.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.265 1969-12-31 15:59:56.584 true NULL -NULL 19 312515097 1864027286 NULL 19.0 ds5YqbRvhf3Sb2 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:56.211 false true +NULL 10 813877020 -1645852809 NULL 10.0 4QG23O2GKF6BUe13O7A2C xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.851 false false -7 19 528534767 NULL -7.0 19.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 16:00:13.994 1969-12-31 15:59:55.362 true NULL +NULL 19 312515097 1864027286 NULL 19.0 ds5YqbRvhf3Sb2 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:56.211 false true -45 20 253665376 NULL -45.0 20.0 1cGVWH7n1QU NULL 1969-12-31 16:00:09.949 1969-12-31 16:00:10.979 true NULL NULL 34 510824788 -1887561756 NULL 34.0 nj1bXoh6k 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:46.017 true false 41 37 528534767 NULL 41.0 37.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:52.817 1969-12-31 15:59:53.672 true NULL diff --git ql/src/test/results/clientpositive/llap/union7.q.out ql/src/test/results/clientpositive/llap/union7.q.out index a157f36a95..612bf86cd8 100644 --- ql/src/test/results/clientpositive/llap/union7.q.out +++ ql/src/test/results/clientpositive/llap/union7.q.out @@ -128,20 +128,20 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 #### A masked pattern was here #### -128 1 -213 1 -278 1 -369 
1 -tst1 1 10 -150 1 -238 1 -66 1 +128 1 146 1 +150 1 +213 1 224 1 +238 1 255 1 273 1 +278 1 311 1 +369 1 401 1 406 1 +66 1 98 1 +tst1 1 diff --git ql/src/test/results/clientpositive/llap/update_all_partitioned.q.out ql/src/test/results/clientpositive/llap/update_all_partitioned.q.out index 4ee1e2e4cf..78c02ceea2 100644 --- ql/src/test/results/clientpositive/llap/update_all_partitioned.q.out +++ ql/src/test/results/clientpositive/llap/update_all_partitioned.q.out @@ -53,11 +53,11 @@ POSTHOOK: Input: default@acid_uap@ds=tomorrow 762 BLoMwUJ51ns6pd tomorrow 762 a10E76jX35YwquKCTA tomorrow 762 q5y2Vy1 tomorrow -6981 NULL tomorrow 6981 1FNNhmiFLGw425NA13g tomorrow 6981 4KhrrQ0nJ7bMNTvhSCA tomorrow 6981 K630vaVf tomorrow 6981 Y5x3JuI3M8jngv5N tomorrow +6981 YdG61y00526u5 tomorrow PREHOOK: query: update acid_uap set b = 'fred' PREHOOK: type: QUERY PREHOOK: Input: default@acid_uap diff --git ql/src/test/results/clientpositive/llap/update_tmp_table.q.out ql/src/test/results/clientpositive/llap/update_tmp_table.q.out index 446a3797b8..746fb9f82e 100644 --- ql/src/test/results/clientpositive/llap/update_tmp_table.q.out +++ ql/src/test/results/clientpositive/llap/update_tmp_table.q.out @@ -50,7 +50,6 @@ POSTHOOK: query: select * from acid_utt order by a POSTHOOK: type: QUERY POSTHOOK: Input: default@acid_utt #### A masked pattern was here #### -NULL 0ruyd6Y50JpdGRf6HqD -1073279343 oj1YrV5Wa -1073051226 A34p7oRr2WvUJNf -1072910839 0iqrc5 @@ -60,3 +59,4 @@ NULL 0ruyd6Y50JpdGRf6HqD -1071363017 Anj0oF -1070551679 iUR3Q -1069736047 k17Am8uPHWk02cEf1jet +NULL 0ruyd6Y50JpdGRf6HqD diff --git ql/src/test/results/clientpositive/llap/update_where_partitioned.q.out ql/src/test/results/clientpositive/llap/update_where_partitioned.q.out index 1834e837c1..ac603b8176 100644 --- ql/src/test/results/clientpositive/llap/update_where_partitioned.q.out +++ ql/src/test/results/clientpositive/llap/update_where_partitioned.q.out @@ -53,11 +53,11 @@ POSTHOOK: Input: default@acid_uwp@ds=tomorrow 762 BLoMwUJ51ns6pd tomorrow 762 a10E76jX35YwquKCTA tomorrow 762 q5y2Vy1 tomorrow -6981 NULL tomorrow 6981 1FNNhmiFLGw425NA13g tomorrow 6981 4KhrrQ0nJ7bMNTvhSCA tomorrow 6981 K630vaVf tomorrow 6981 Y5x3JuI3M8jngv5N tomorrow +6981 YdG61y00526u5 tomorrow PREHOOK: query: update acid_uwp set b = 'fred' where b = 'k17Am8uPHWk02cEf1jet' PREHOOK: type: QUERY PREHOOK: Input: default@acid_uwp @@ -99,8 +99,8 @@ POSTHOOK: Input: default@acid_uwp@ds=tomorrow 762 BLoMwUJ51ns6pd tomorrow 762 a10E76jX35YwquKCTA tomorrow 762 q5y2Vy1 tomorrow -6981 NULL tomorrow 6981 1FNNhmiFLGw425NA13g tomorrow 6981 4KhrrQ0nJ7bMNTvhSCA tomorrow 6981 K630vaVf tomorrow 6981 Y5x3JuI3M8jngv5N tomorrow +6981 YdG61y00526u5 tomorrow diff --git ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out index b1fa6a74a4..a9971d4a48 100644 --- ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out +++ ql/src/test/results/clientpositive/llap/vector_aggregate_9.q.out @@ -155,10 +155,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3] + valueColumns: 0:decimal(38,18), 1:decimal(38,18), 2:decimal(38,18), 3:bigint Statistics: Num rows: 1 Data 
size: 456 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: decimal(38,18)), _col1 (type: decimal(38,18)), _col2 (type: decimal(38,18)), _col3 (type: bigint) Execution mode: vectorized, llap @@ -295,10 +294,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3] + valueColumns: 0:double, 1:double, 2:double, 3:bigint Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: double), _col1 (type: double), _col2 (type: double), _col3 (type: bigint) Execution mode: vectorized, llap @@ -435,10 +433,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3] + valueColumns: 0:timestamp, 1:timestamp, 2:double, 3:bigint Statistics: Num rows: 1 Data size: 136 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: timestamp), _col1 (type: timestamp), _col2 (type: double), _col3 (type: bigint) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vector_aggregate_without_gby.q.out ql/src/test/results/clientpositive/llap/vector_aggregate_without_gby.q.out index 46227d29c3..396afd3f8b 100644 --- ql/src/test/results/clientpositive/llap/vector_aggregate_without_gby.q.out +++ ql/src/test/results/clientpositive/llap/vector_aggregate_without_gby.q.out @@ -95,10 +95,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1] + valueColumns: 0:int, 1:string Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: int), _col1 (type: string) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vector_auto_smb_mapjoin_14.q.out ql/src/test/results/clientpositive/llap/vector_auto_smb_mapjoin_14.q.out index ab37a4d5cf..eb4b26285b 100644 --- ql/src/test/results/clientpositive/llap/vector_auto_smb_mapjoin_14.q.out +++ ql/src/test/results/clientpositive/llap/vector_auto_smb_mapjoin_14.q.out @@ -106,6 +106,9 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: llap + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -240,6 +243,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: llap + MergeJoin Vectorization: + enabled: false 
+ enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -448,6 +454,9 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Execution mode: llap + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Map 5 Map Operator Tree: TableScan @@ -494,6 +503,9 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Execution mode: llap + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -549,6 +561,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 6 Execution mode: vectorized, llap Reduce Vectorization: @@ -705,6 +720,9 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: llap + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -844,6 +862,9 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: llap + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -1007,6 +1028,9 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: llap + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -1158,6 +1182,9 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: llap + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -1353,6 +1380,9 @@ STAGE PLANS: sort order: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: @@ -1478,6 +1508,9 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: llap + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -1623,6 +1656,9 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: llap + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing 
MergeJoin Supported IS false Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -1770,6 +1806,9 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: llap + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: @@ -1944,6 +1983,9 @@ STAGE PLANS: serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest2_n40 Execution mode: llap + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Stage: Stage-3 Dependency Collection @@ -2169,6 +2211,9 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Execution mode: llap + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: diff --git ql/src/test/results/clientpositive/llap/vector_between_columns.q.out ql/src/test/results/clientpositive/llap/vector_between_columns.q.out index 182497685f..be0e5ca311 100644 --- ql/src/test/results/clientpositive/llap/vector_between_columns.q.out +++ ql/src/test/results/clientpositive/llap/vector_between_columns.q.out @@ -196,6 +196,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Stage: Stage-0 Fetch Operator @@ -368,6 +371,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/llap/vector_between_in.q.out ql/src/test/results/clientpositive/llap/vector_between_in.q.out index 3bfd1aaddc..a2a765bb20 100644 --- ql/src/test/results/clientpositive/llap/vector_between_in.q.out +++ ql/src/test/results/clientpositive/llap/vector_between_in.q.out @@ -1639,9 +1639,9 @@ POSTHOOK: query: SELECT c0, count(1) from (SELECT cdate IN (CAST("1969-10-26" AS POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_date_test #### A masked pattern was here #### -NULL 6231 false 6041 true 17 +NULL 6231 PREHOOK: query: SELECT c0, count(1) from (SELECT cdecimal1 IN (2365.8945945946, 881.0135135135, -3367.6517567568) as c0 FROM decimal_date_test) tab GROUP BY c0 ORDER BY c0 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_date_test @@ -1650,9 +1650,9 @@ POSTHOOK: query: SELECT c0, count(1) from (SELECT cdecimal1 IN (2365.8945945946, POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_date_test #### A masked pattern was here #### -NULL 3115 false 9165 true 9 +NULL 3115 PREHOOK: query: SELECT c0, count(1) from (SELECT cdate BETWEEN CAST("1969-12-30" AS DATE) AND CAST("1970-01-02" AS DATE) as c0 FROM decimal_date_test) tab GROUP BY c0 ORDER BY c0 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_date_test @@ -1661,9 +1661,9 @@ POSTHOOK: query: SELECT c0, count(1) from (SELECT cdate BETWEEN CAST("1969-12-3 POSTHOOK: type: QUERY 
POSTHOOK: Input: default@decimal_date_test #### A masked pattern was here #### -NULL 6231 false 5974 true 84 +NULL 6231 PREHOOK: query: SELECT c0, count(1) from (SELECT cdecimal1 NOT BETWEEN -2000 AND 4390.1351351351 as c0 FROM decimal_date_test) tab GROUP BY c0 ORDER BY c0 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_date_test @@ -1672,9 +1672,9 @@ POSTHOOK: query: SELECT c0, count(1) from (SELECT cdecimal1 NOT BETWEEN -2000 AN POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_date_test #### A masked pattern was here #### -NULL 3115 false 3002 true 6172 +NULL 3115 PREHOOK: query: SELECT c0, count(1) from (SELECT cdate IN (CAST("1969-10-26" AS DATE), CAST("1969-07-14" AS DATE)) as c0 FROM decimal_date_test) tab GROUP BY c0 ORDER BY c0 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_date_test @@ -1683,9 +1683,9 @@ POSTHOOK: query: SELECT c0, count(1) from (SELECT cdate IN (CAST("1969-10-26" AS POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_date_test #### A masked pattern was here #### -NULL 6231 false 6041 true 17 +NULL 6231 PREHOOK: query: SELECT c0, count(1) from (SELECT cdecimal1 IN (2365.8945945946, 881.0135135135, -3367.6517567568) as c0 FROM decimal_date_test) tab GROUP BY c0 ORDER BY c0 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_date_test @@ -1694,9 +1694,9 @@ POSTHOOK: query: SELECT c0, count(1) from (SELECT cdecimal1 IN (2365.8945945946, POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_date_test #### A masked pattern was here #### -NULL 3115 false 9165 true 9 +NULL 3115 PREHOOK: query: SELECT c0, count(1) from (SELECT cdate BETWEEN CAST("1969-12-30" AS DATE) AND CAST("1970-01-02" AS DATE) as c0 FROM decimal_date_test) tab GROUP BY c0 ORDER BY c0 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_date_test @@ -1705,9 +1705,9 @@ POSTHOOK: query: SELECT c0, count(1) from (SELECT cdate BETWEEN CAST("1969-12-3 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_date_test #### A masked pattern was here #### -NULL 6231 false 5974 true 84 +NULL 6231 PREHOOK: query: SELECT c0, count(1) from (SELECT cdecimal1 NOT BETWEEN -2000 AND 4390.1351351351 as c0 FROM decimal_date_test) tab GROUP BY c0 ORDER BY c0 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_date_test @@ -1716,6 +1716,6 @@ POSTHOOK: query: SELECT c0, count(1) from (SELECT cdecimal1 NOT BETWEEN -2000 AN POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_date_test #### A masked pattern was here #### -NULL 3115 false 3002 true 6172 +NULL 3115 diff --git ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out index 6c461f0149..cabc2b747e 100644 --- ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out +++ ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out @@ -157,6 +157,7 @@ STAGE PLANS: className: VectorMapJoinInnerStringOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21 input vertices: 1 Map 4 @@ -571,6 +572,7 @@ STAGE PLANS: className: 
VectorMapJoinInnerLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col3 input vertices: 1 Map 2 diff --git ql/src/test/results/clientpositive/llap/vector_case_when_2.q.out ql/src/test/results/clientpositive/llap/vector_case_when_2.q.out index cd78689d97..781f990726 100644 --- ql/src/test/results/clientpositive/llap/vector_case_when_2.q.out +++ ql/src/test/results/clientpositive/llap/vector_case_when_2.q.out @@ -158,7 +158,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aaa + reduceColumnNullOrder: zzz reduceColumnSortOrder: +++ allNative: false usesVectorUDFAdaptor: false @@ -261,7 +261,6 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamps #### A masked pattern was here #### ctimestamp1 ctimestamp2 ctimestamp2_description ctimestamp2_description_2 ctimestamp2_description_3 field1 field_2 field_3 field_4 field_5 -NULL NULL Unknown NULL NULL NULL 2018-03-08 23:04:59 NULL NULL NULL 0004-09-22 18:26:29.519542222 0004-09-21 16:23:25.519542222 1800s or Earlier Old Old 4 0004-09-22 18:26:29.519542222 26 NULL 0005-09-22 0528-10-27 08:15:18.941718273 0528-10-26 06:12:14.941718273 1800s or Earlier Old Old 528 2018-03-08 23:04:59 15 NULL 0529-10-27 1319-02-02 16:31:57.778 1319-02-01 14:28:53.778 1800s or Earlier Old Old 1319 1319-02-02 16:31:57.778 31 NULL 1320-02-02 @@ -312,6 +311,7 @@ NULL NULL Unknown NULL NULL NULL 2018-03-08 23:04:59 NULL NULL NULL 9075-06-13 16:20:09.218517797 9075-06-12 14:17:05.218517797 Unknown NULL NULL 9075 2018-03-08 23:04:59 20 NULL 9075-06-14 9209-11-11 04:08:58.223768453 9209-11-10 02:05:54.223768453 Unknown NULL NULL 9209 2018-03-08 23:04:59 8 NULL 9209-11-12 9403-01-09 18:12:33.547 9403-01-08 16:09:29.547 Unknown NULL NULL 9403 2018-03-08 23:04:59 12 NULL 9404-01-09 +NULL NULL Unknown NULL NULL NULL 2018-03-08 23:04:59 NULL NULL NULL PREHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT ctimestamp1, @@ -415,10 +415,10 @@ STAGE PLANS: sort order: +++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 2, 3] + keyColumns: 1:timestamp, 2:string, 3:timestamp native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [10, 12, 13, 14, 11, 7, 16, 23] + valueColumns: 10:string, 12:string, 13:string, 14:int, 11:string, 7:int, 16:int, 23:date Statistics: Num rows: 51 Data size: 16000 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: int), _col8 (type: int), _col9 (type: date) Execution mode: vectorized, llap @@ -443,7 +443,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aaa + reduceColumnNullOrder: zzz reduceColumnSortOrder: +++ 
allNative: false usesVectorUDFAdaptor: false @@ -546,7 +546,6 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamps #### A masked pattern was here #### ctimestamp1 ctimestamp2 ctimestamp2_description ctimestamp2_description_2 ctimestamp2_description_3 field1 field_2 field_3 field_4 field_5 -NULL NULL Unknown NULL NULL NULL 2018-03-08 23:04:59 NULL NULL NULL 0004-09-22 18:26:29.519542222 0004-09-21 16:23:25.519542222 1800s or Earlier Old Old 4 0004-09-22 18:26:29.519542222 26 NULL 0005-09-22 0528-10-27 08:15:18.941718273 0528-10-26 06:12:14.941718273 1800s or Earlier Old Old 528 2018-03-08 23:04:59 15 NULL 0529-10-27 1319-02-02 16:31:57.778 1319-02-01 14:28:53.778 1800s or Earlier Old Old 1319 1319-02-02 16:31:57.778 31 NULL 1320-02-02 @@ -597,6 +596,7 @@ NULL NULL Unknown NULL NULL NULL 2018-03-08 23:04:59 NULL NULL NULL 9075-06-13 16:20:09.218517797 9075-06-12 14:17:05.218517797 Unknown NULL NULL 9075 2018-03-08 23:04:59 20 NULL 9075-06-14 9209-11-11 04:08:58.223768453 9209-11-10 02:05:54.223768453 Unknown NULL NULL 9209 2018-03-08 23:04:59 8 NULL 9209-11-12 9403-01-09 18:12:33.547 9403-01-08 16:09:29.547 Unknown NULL NULL 9403 2018-03-08 23:04:59 12 NULL 9404-01-09 +NULL NULL Unknown NULL NULL NULL 2018-03-08 23:04:59 NULL NULL NULL PREHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT ctimestamp1, @@ -700,10 +700,10 @@ STAGE PLANS: sort order: +++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 2, 3] + keyColumns: 1:timestamp, 2:string, 3:timestamp native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [15, 26, 36, 40, 42, 44, 46, 53] + valueColumns: 15:string, 26:string, 36:string, 40:int, 42:string, 44:int, 46:int, 53:date Statistics: Num rows: 51 Data size: 16000 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: int), _col8 (type: int), _col9 (type: date) Execution mode: vectorized, llap @@ -728,7 +728,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aaa + reduceColumnNullOrder: zzz reduceColumnSortOrder: +++ allNative: false usesVectorUDFAdaptor: false @@ -831,7 +831,6 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamps #### A masked pattern was here #### ctimestamp1 ctimestamp2 ctimestamp2_description ctimestamp2_description_2 ctimestamp2_description_3 field1 field_2 field_3 field_4 field_5 -NULL NULL Unknown NULL NULL NULL 2018-03-08 23:04:59 NULL NULL NULL 0004-09-22 18:26:29.519542222 0004-09-21 16:23:25.519542222 1800s or Earlier Old Old 4 0004-09-22 18:26:29.519542222 26 NULL 0005-09-22 0528-10-27 08:15:18.941718273 0528-10-26 06:12:14.941718273 1800s or Earlier Old Old 528 2018-03-08 23:04:59 15 NULL 0529-10-27 1319-02-02 16:31:57.778 1319-02-01 14:28:53.778 1800s or Earlier Old Old 1319 1319-02-02 16:31:57.778 31 NULL 1320-02-02 @@ -882,3 +881,4 @@ NULL NULL Unknown NULL NULL NULL 2018-03-08 23:04:59 NULL NULL NULL 9075-06-13 16:20:09.218517797 9075-06-12 14:17:05.218517797 Unknown NULL NULL 9075 2018-03-08 23:04:59 20 NULL 9075-06-14 9209-11-11 04:08:58.223768453 9209-11-10 02:05:54.223768453 Unknown NULL NULL 
9209 2018-03-08 23:04:59 8 NULL 9209-11-12 9403-01-09 18:12:33.547 9403-01-08 16:09:29.547 Unknown NULL NULL 9403 2018-03-08 23:04:59 12 NULL 9404-01-09 +NULL NULL Unknown NULL NULL NULL 2018-03-08 23:04:59 NULL NULL NULL diff --git ql/src/test/results/clientpositive/llap/vector_char_mapjoin1.q.out ql/src/test/results/clientpositive/llap/vector_char_mapjoin1.q.out index 4ae125b890..c92eddfc81 100644 --- ql/src/test/results/clientpositive/llap/vector_char_mapjoin1.q.out +++ ql/src/test/results/clientpositive/llap/vector_char_mapjoin1.q.out @@ -178,6 +178,7 @@ STAGE PLANS: className: VectorMapJoinInnerStringOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col2, _col3 input vertices: 1 Map 3 @@ -391,6 +392,7 @@ STAGE PLANS: className: VectorMapJoinInnerStringOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col2, _col3 input vertices: 0 Map 1 @@ -562,6 +564,7 @@ STAGE PLANS: className: VectorMapJoinInnerStringOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col2, _col3 input vertices: 0 Map 1 diff --git ql/src/test/results/clientpositive/llap/vector_coalesce.q.out ql/src/test/results/clientpositive/llap/vector_coalesce.q.out index bc00c98b7f..a65603b2cf 100644 --- ql/src/test/results/clientpositive/llap/vector_coalesce.q.out +++ ql/src/test/results/clientpositive/llap/vector_coalesce.q.out @@ -96,16 +96,16 @@ LIMIT 10 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL NULL -413196097 -51.0 NULL -413196097 -NULL NULL -413553449 11.0 NULL -413553449 -NULL NULL -457224565 11.0 NULL -457224565 -NULL NULL -591488718 -51.0 NULL -591488718 -NULL NULL -656987896 8.0 NULL -656987896 -NULL NULL -670908417 8.0 NULL -670908417 -NULL NULL -738306196 -51.0 NULL -738306196 -NULL NULL -819152895 8.0 NULL -819152895 -NULL NULL -827212561 8.0 NULL -827212561 -NULL NULL -949587513 11.0 NULL -949587513 +NULL 00MmJs1fiJp37y60mj4Ej8 -698191930 -51.0 NULL 00MmJs1fiJp37y60mj4Ej8 +NULL 00PafC7v 349566607 -51.0 NULL 00PafC7v +NULL 00iT08 284688862 -51.0 NULL 00iT08 +NULL 00k3yt70n476d6UQA -391432229 8.0 NULL 00k3yt70n476d6UQA +NULL 014ILGhXxNY7g02hl0Xw 633097881 11.0 NULL 014ILGhXxNY7g02hl0Xw +NULL 02VRbSC5I 551634127 8.0 NULL 02VRbSC5I +NULL 02k5poW73QsWM 891702124 11.0 NULL 02k5poW73QsWM +NULL 02v8WnLuYDos3Cq -648704945 8.0 NULL 02v8WnLuYDos3Cq +NULL 02vDyIVT752 388584379 11.0 NULL 02vDyIVT752 +NULL 0333uXvwB3ADRa4aP1h 336245146 8.0 NULL 0333uXvwB3ADRa4aP1h PREHOOK: query: EXPLAIN VECTORIZATION ONLY EXPRESSION SELECT ctinyint, cdouble, 
cint, coalesce(ctinyint+10, (cdouble+log2(cint)), 0) as c FROM alltypesorc WHERE (ctinyint IS NULL) @@ -204,16 +204,16 @@ LIMIT 10 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL NULL -1015272448 0.0 -NULL NULL -609074876 0.0 -NULL NULL -700300206 0.0 -NULL NULL -726473298 0.0 -NULL NULL -738747840 0.0 -NULL NULL -838810013 0.0 -NULL NULL -850295959 0.0 -NULL NULL -886426182 0.0 -NULL NULL -899422227 0.0 -NULL NULL -971543377 0.0 +NULL -16269.0 -378213344 0.0 +NULL -16274.0 -671342269 0.0 +NULL -16296.0 -146635689 0.0 +NULL -16296.0 593429004 -16266.855499800256 +NULL -16300.0 -860437234 0.0 +NULL -16306.0 384405526 -16277.481946165259 +NULL -16307.0 559926362 -16277.939338135451 +NULL -16309.0 -826497289 0.0 +NULL -16310.0 206154150 -16282.380851737113 +NULL -16379.0 -894716315 0.0 PREHOOK: query: EXPLAIN VECTORIZATION ONLY EXPRESSION SELECT cfloat, cbigint, coalesce(cfloat, cbigint, 0) as c FROM alltypesorc WHERE (cfloat IS NULL AND cbigint IS NULL) @@ -391,16 +391,16 @@ LIMIT 10 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL 1969-12-31 15:59:30.929 1969-12-31 15:59:30.929 -NULL 1969-12-31 15:59:30.929 1969-12-31 15:59:30.929 -NULL 1969-12-31 15:59:30.929 1969-12-31 15:59:30.929 -NULL 1969-12-31 15:59:43.63 1969-12-31 15:59:43.63 -NULL 1969-12-31 15:59:43.658 1969-12-31 15:59:43.658 -NULL 1969-12-31 15:59:43.672 1969-12-31 15:59:43.672 -NULL 1969-12-31 15:59:43.684 1969-12-31 15:59:43.684 -NULL 1969-12-31 15:59:43.703 1969-12-31 15:59:43.703 -NULL 1969-12-31 15:59:43.704 1969-12-31 15:59:43.704 -NULL 1969-12-31 15:59:43.709 1969-12-31 15:59:43.709 +1969-12-31 15:59:30.929 1969-12-31 15:59:55.451 1969-12-31 15:59:30.929 +1969-12-31 15:59:30.929 1969-12-31 15:59:55.451 1969-12-31 15:59:30.929 +1969-12-31 15:59:30.929 1969-12-31 15:59:58.174 1969-12-31 15:59:30.929 +1969-12-31 15:59:30.929 1969-12-31 15:59:58.456 1969-12-31 15:59:30.929 +1969-12-31 15:59:43.619 1969-12-31 16:00:14.793 1969-12-31 15:59:43.619 +1969-12-31 15:59:43.627 1969-12-31 16:00:03.679 1969-12-31 15:59:43.627 +1969-12-31 15:59:43.628 1969-12-31 15:59:55.451 1969-12-31 15:59:43.628 +1969-12-31 15:59:43.631 1969-12-31 16:00:06.612 1969-12-31 15:59:43.631 +1969-12-31 15:59:43.637 1969-12-31 15:59:58.174 1969-12-31 15:59:43.637 +1969-12-31 15:59:43.64 1969-12-31 15:59:58.174 1969-12-31 15:59:43.64 PREHOOK: query: EXPLAIN VECTORIZATION ONLY EXPRESSION SELECT cfloat, cbigint, coalesce(cfloat, cbigint) as c FROM alltypesorc WHERE (cfloat IS NULL AND cbigint IS NULL) diff --git ql/src/test/results/clientpositive/llap/vector_coalesce_3.q.out ql/src/test/results/clientpositive/llap/vector_coalesce_3.q.out index 2fda57048e..1953826911 100644 --- ql/src/test/results/clientpositive/llap/vector_coalesce_3.q.out +++ ql/src/test/results/clientpositive/llap/vector_coalesce_3.q.out @@ -99,14 +99,15 @@ STAGE PLANS: 0 _col0 (type: bigint) 1 _col0 (type: bigint) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0] - bigTableValueColumnNums: [0] + bigTableKeyColumns: 0:bigint + bigTableRetainColumnNums: [0] + bigTableValueColumns: 0:bigint className: VectorMapJoinOuterLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports 
Key Types IS true - projectedOutputColumnNums: [0, 2] - smallTableMapping: [2] + projectedOutput: 0:bigint, 2:bigint + smallTableValueMapping: 2:bigint + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col2 input vertices: 1 Map 2 @@ -169,10 +170,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:bigint Statistics: Num rows: 3 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: bigint) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vector_coalesce_4.q.out ql/src/test/results/clientpositive/llap/vector_coalesce_4.q.out index e609d1459c..01b746d381 100644 --- ql/src/test/results/clientpositive/llap/vector_coalesce_4.q.out +++ ql/src/test/results/clientpositive/llap/vector_coalesce_4.q.out @@ -70,10 +70,10 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:int, 1:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:int Statistics: Num rows: 2 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: int) Execution mode: vectorized, llap @@ -98,7 +98,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: zz reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -142,5 +142,5 @@ POSTHOOK: query: select coalesce(a, b) from coalesce_test order by a, b POSTHOOK: type: QUERY POSTHOOK: Input: default@coalesce_test #### A masked pattern was here #### -NULL 1 +NULL diff --git ql/src/test/results/clientpositive/llap/vector_complex_all.q.out ql/src/test/results/clientpositive/llap/vector_complex_all.q.out index 4e1698da4d..b6247ef2b2 100644 --- ql/src/test/results/clientpositive/llap/vector_complex_all.q.out +++ ql/src/test/results/clientpositive/llap/vector_complex_all.q.out @@ -690,10 +690,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3] + valueColumns: 0:string, 1:map, 2:array, 3:struct Statistics: Num rows: 1 Data size: 3440 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: map), _col2 (type: array), _col3 (type: struct) Execution mode: vectorized, llap @@ -730,10 +729,8 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true 
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap Map Vectorization: @@ -769,10 +766,8 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap Map Vectorization: @@ -953,10 +948,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap @@ -1178,10 +1172,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:bigint Statistics: Num rows: 13503 Data size: 4721072 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Execution mode: vectorized, llap @@ -1321,10 +1315,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:bigint Statistics: Num rows: 13503 Data size: 7697400 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vector_complex_join.q.out ql/src/test/results/clientpositive/llap/vector_complex_join.q.out index b9088946fb..d222460687 100644 --- ql/src/test/results/clientpositive/llap/vector_complex_join.q.out +++ ql/src/test/results/clientpositive/llap/vector_complex_join.q.out @@ -73,6 +73,7 @@ STAGE PLANS: className: VectorMapJoinInnerLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, 
Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13 input vertices: 1 Map 2 @@ -254,6 +255,7 @@ STAGE PLANS: className: VectorMapJoinInnerLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col2 input vertices: 1 Map 2 @@ -399,6 +401,7 @@ STAGE PLANS: className: VectorMapJoinInnerLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col2 input vertices: 1 Map 2 diff --git ql/src/test/results/clientpositive/llap/vector_data_types.q.out ql/src/test/results/clientpositive/llap/vector_data_types.q.out index 6f61adcab0..9bd7bc1eea 100644 --- ql/src/test/results/clientpositive/llap/vector_data_types.q.out +++ ql/src/test/results/clientpositive/llap/vector_data_types.q.out @@ -179,10 +179,6 @@ POSTHOOK: query: SELECT t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_ POSTHOOK: type: QUERY POSTHOOK: Input: default@over1korc_n1 #### A masked pattern was here #### -NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL 374 65560 4294967516 65.43 22.48 true oscar quirinius 2013-03-01 09:11:58.703316 16.86 mathematics -NULL 409 65536 4294967490 46.97 25.92 false fred miller 2013-03-01 09:11:58.703116 33.45 history -NULL 473 65720 4294967324 80.74 40.6 false holly falkner 2013-03-01 09:11:58.703111 18.80 mathematics -3 275 65622 4294967302 71.78 8.49 false wendy robinson 2013-03-01 09:11:58.703294 95.39 undecided -3 344 65733 4294967363 0.56 11.96 true rachel thompson 2013-03-01 09:11:58.703276 88.46 wind surfing -3 376 65548 4294967431 96.78 43.23 false fred ellison 2013-03-01 09:11:58.703233 75.39 education @@ -199,6 +195,10 @@ NULL 473 65720 4294967324 80.74 40.6 false holly falkner 2013-03-01 09:11:58.703 -2 461 65648 4294967425 58.52 24.85 false rachel thompson 2013-03-01 09:11:58.703318 85.62 zync studies -1 268 65778 4294967418 56.33 44.73 true calvin falkner 2013-03-01 09:11:58.70322 7.37 history -1 281 65643 4294967323 15.1 45.0 false irene nixon 2013-03-01 09:11:58.703223 80.96 undecided +-1 300 65663 4294967343 71.26 34.62 true calvin ovid 2013-03-01 09:11:58.703262 78.56 study skills +-1 348 65556 4294967413 35.17 9.51 false bob young 2013-03-01 09:11:58.70328 45.81 quiet hour +-1 372 65680 4294967490 15.45 18.09 false ethan laertes 2013-03-01 09:11:58.70311 65.88 opthamology +-1 417 65685 4294967492 28.89 5.19 true mike white 2013-03-01 09:11:58.703275 90.69 forestry PREHOOK: query: SELECT SUM(HASH(*)) FROM (SELECT t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY t, si, i) as q PREHOOK: type: QUERY @@ -314,10 +314,6 @@ POSTHOOK: query: SELECT t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_ POSTHOOK: type: QUERY POSTHOOK: Input: default@over1korc_n1 #### A masked pattern was 
here #### -NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL 374 65560 4294967516 65.43 22.48 true oscar quirinius 2013-03-01 09:11:58.703316 16.86 mathematics -NULL 409 65536 4294967490 46.97 25.92 false fred miller 2013-03-01 09:11:58.703116 33.45 history -NULL 473 65720 4294967324 80.74 40.6 false holly falkner 2013-03-01 09:11:58.703111 18.80 mathematics -3 275 65622 4294967302 71.78 8.49 false wendy robinson 2013-03-01 09:11:58.703294 95.39 undecided -3 344 65733 4294967363 0.56 11.96 true rachel thompson 2013-03-01 09:11:58.703276 88.46 wind surfing -3 376 65548 4294967431 96.78 43.23 false fred ellison 2013-03-01 09:11:58.703233 75.39 education @@ -334,6 +330,10 @@ NULL 473 65720 4294967324 80.74 40.6 false holly falkner 2013-03-01 09:11:58.703 -2 461 65648 4294967425 58.52 24.85 false rachel thompson 2013-03-01 09:11:58.703318 85.62 zync studies -1 268 65778 4294967418 56.33 44.73 true calvin falkner 2013-03-01 09:11:58.70322 7.37 history -1 281 65643 4294967323 15.1 45.0 false irene nixon 2013-03-01 09:11:58.703223 80.96 undecided +-1 300 65663 4294967343 71.26 34.62 true calvin ovid 2013-03-01 09:11:58.703262 78.56 study skills +-1 348 65556 4294967413 35.17 9.51 false bob young 2013-03-01 09:11:58.70328 45.81 quiet hour +-1 372 65680 4294967490 15.45 18.09 false ethan laertes 2013-03-01 09:11:58.70311 65.88 opthamology +-1 417 65685 4294967492 28.89 5.19 true mike white 2013-03-01 09:11:58.703275 90.69 forestry PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(HASH(*)) FROM (SELECT t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY t, si, i) as q diff --git ql/src/test/results/clientpositive/llap/vector_date_1.q.out ql/src/test/results/clientpositive/llap/vector_date_1.q.out index 8ff6ddf28b..1d16d65b82 100644 --- ql/src/test/results/clientpositive/llap/vector_date_1.q.out +++ ql/src/test/results/clientpositive/llap/vector_date_1.q.out @@ -58,9 +58,9 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@vector_date_1 #### A masked pattern was here #### vector_date_1.dt1 vector_date_1.dt2 -NULL NULL 1999-12-31 2000-01-01 2001-01-01 2001-06-01 +NULL NULL PREHOOK: query: explain vectorization detail select dt1, dt2, @@ -128,10 +128,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:date native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 3, 4, 5, 6, 7, 8, 9, 10] + valueColumns: 1:date, 3:boolean, 4:boolean, 5:boolean, 6:boolean, 7:boolean, 8:boolean, 9:boolean, 10:boolean Statistics: Num rows: 3 Data size: 336 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: date), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col6 (type: boolean), _col7 (type: boolean), _col8 (type: boolean), _col9 (type: boolean) Execution mode: vectorized, llap @@ -156,7 +156,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -223,9 +223,9 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@vector_date_1 #### A masked pattern was here #### 
dt1 dt2 _c2 _c3 _c4 _c5 _c6 _c7 _c8 _c9 -NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL 1999-12-31 2000-01-01 true true true true true true true true 2001-01-01 2001-06-01 true true true true true true true true +NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL PREHOOK: query: explain vectorization detail select dt1, dt2, @@ -293,10 +293,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:date native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 3, 4, 5, 6, 7, 8, 9, 10] + valueColumns: 1:date, 3:boolean, 4:boolean, 5:boolean, 6:boolean, 7:boolean, 8:boolean, 9:boolean, 10:boolean Statistics: Num rows: 3 Data size: 336 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: date), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col6 (type: boolean), _col7 (type: boolean), _col8 (type: boolean), _col9 (type: boolean) Execution mode: vectorized, llap @@ -321,7 +321,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -388,9 +388,9 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@vector_date_1 #### A masked pattern was here #### dt1 dt2 _c2 _c3 _c4 _c5 _c6 _c7 _c8 _c9 -NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL 1999-12-31 2000-01-01 false false false false false false false false 2001-01-01 2001-06-01 false false false false false false false false +NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL PREHOOK: query: explain vectorization detail select dt1, @@ -458,10 +458,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:date native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3, 4, 5, 6, 7] + valueColumns: 3:boolean, 4:boolean, 5:boolean, 6:boolean, 7:boolean Statistics: Num rows: 3 Data size: 168 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: boolean), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean) Execution mode: vectorized, llap @@ -486,7 +486,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -553,9 +553,9 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@vector_date_1 #### A masked pattern was here #### dt1 _c1 _c2 _c3 _c4 _c5 _c6 _c7 _c8 -NULL NULL NULL NULL NULL NULL NULL NULL NULL 1999-12-31 true true true true true true true true 2001-01-01 true true true true true true true true +NULL NULL NULL NULL NULL NULL NULL NULL NULL PREHOOK: query: explain vectorization detail select dt1, @@ -623,10 +623,10 @@ STAGE 
PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:date native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3, 4, 5, 6, 7] + valueColumns: 3:boolean, 4:boolean, 5:boolean, 6:boolean, 7:boolean Statistics: Num rows: 3 Data size: 168 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: boolean), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean) Execution mode: vectorized, llap @@ -651,7 +651,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -718,9 +718,9 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@vector_date_1 #### A masked pattern was here #### dt1 _c1 _c2 _c3 _c4 _c5 _c6 _c7 _c8 -NULL NULL NULL NULL NULL NULL NULL NULL NULL 1999-12-31 false false false false false false false false 2001-01-01 false false false false false false false false +NULL NULL NULL NULL NULL NULL NULL NULL NULL PREHOOK: query: explain vectorization detail select dt1, dt2 @@ -793,10 +793,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:date native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:date Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: date) Execution mode: vectorized, llap @@ -821,7 +821,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false diff --git ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out index fd934f0864..4c81131cd0 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_1.q.out @@ -87,10 +87,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [5] + keyColumns: 5:boolean native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -114,7 +113,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + 
reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -158,8 +157,8 @@ POSTHOOK: query: select cast(t as boolean) from decimal_1 order by t POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### -NULL true +NULL PREHOOK: query: explain vectorization detail select cast(t as tinyint) from decimal_1 order by t PREHOOK: type: QUERY @@ -204,10 +203,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [5] + keyColumns: 5:tinyint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -231,7 +229,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -275,8 +273,8 @@ POSTHOOK: query: select cast(t as tinyint) from decimal_1 order by t POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### -NULL 17 +NULL PREHOOK: query: explain vectorization detail select cast(t as smallint) from decimal_1 order by t PREHOOK: type: QUERY @@ -321,10 +319,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [5] + keyColumns: 5:smallint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -348,7 +345,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -392,8 +389,8 @@ POSTHOOK: query: select cast(t as smallint) from decimal_1 order by t POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### -NULL 17 +NULL PREHOOK: query: explain vectorization detail select cast(t as int) from decimal_1 order by t PREHOOK: type: QUERY @@ -438,10 +435,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [5] + keyColumns: 5:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -465,7 +461,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: 
hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -509,8 +505,8 @@ POSTHOOK: query: select cast(t as int) from decimal_1 order by t POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### -NULL 17 +NULL PREHOOK: query: explain vectorization detail select cast(t as bigint) from decimal_1 order by t PREHOOK: type: QUERY @@ -555,10 +551,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [5] + keyColumns: 5:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -582,7 +577,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -626,8 +621,8 @@ POSTHOOK: query: select cast(t as bigint) from decimal_1 order by t POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### -NULL 17 +NULL PREHOOK: query: explain vectorization detail select cast(t as float) from decimal_1 order by t PREHOOK: type: QUERY @@ -672,10 +667,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [5] + keyColumns: 5:float native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -699,7 +693,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -743,8 +737,8 @@ POSTHOOK: query: select cast(t as float) from decimal_1 order by t POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### -NULL 17.29 +NULL PREHOOK: query: explain vectorization detail select cast(t as double) from decimal_1 order by t PREHOOK: type: QUERY @@ -789,10 +783,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [5] + keyColumns: 5:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs 
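The golden-file churn in this region follows two patterns. First, reduceColumnNullOrder flips from a to z and the NULL rows of each ascending ORDER BY result move from the top of the expected output to the bottom: with the patch's new hive.default.nulls.last=true default, an unqualified ascending sort now places NULLs last. A minimal HiveQL sketch of the behavior change, using an invented table t rather than any of these test fixtures:

-- Hypothetical illustration only; t and its rows are not part of this patch.
CREATE TABLE t (c INT);
INSERT INTO t VALUES (2), (NULL), (1);

-- New default (hive.default.nulls.last=true): NULLs sort last on ascending
-- keys, which the plan surfaces as reduceColumnNullOrder: z instead of a.
SELECT c FROM t ORDER BY c;              -- 1, 2, NULL
-- The previous ordering remains available with an explicit modifier:
SELECT c FROM t ORDER BY c NULLS FIRST;  -- NULL, 1, 2

Second, EXPLAIN VECTORIZATION DETAIL now prints typed column lists (keyColumns: 0:string, valueColumns: 1:bigint) in place of the bare index lists keyColumnNums/valueColumnNums, and native map joins additionally report their hashTableImplementationType (OPTIMIZED in the hunks above). A hedged sketch of a query that exercises the new annotations, again with an invented table:

-- Hypothetical example; src_orc is not one of the patch's q-file tables.
CREATE TABLE src_orc (k STRING, v BIGINT) STORED AS ORC;
SET hive.vectorized.execution.enabled=true;

EXPLAIN VECTORIZATION DETAIL
SELECT k, SUM(v) FROM src_orc GROUP BY k ORDER BY k;
-- Reduce Sink Vectorization in the new format reads, for example:
--   keyColumns: 0:string
--   valueColumns: 1:bigint
-- where the old format printed keyColumnNums: [0] / valueColumnNums: [1].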
@@ -816,7 +809,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -860,8 +853,8 @@ POSTHOOK: query: select cast(t as double) from decimal_1 order by t POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### -NULL 17.29 +NULL PREHOOK: query: explain vectorization detail select cast(t as string) from decimal_1 order by t PREHOOK: type: QUERY @@ -906,10 +899,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [5] + keyColumns: 5:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -933,7 +925,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -977,8 +969,8 @@ POSTHOOK: query: select cast(t as string) from decimal_1 order by t POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### -NULL 17.29 +NULL PREHOOK: query: explain vectorization detail select cast(t as timestamp) from decimal_1 order by t PREHOOK: type: QUERY @@ -1023,10 +1015,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [5] + keyColumns: 5:timestamp native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -1050,7 +1041,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -1094,8 +1085,8 @@ POSTHOOK: query: select cast(t as timestamp) from decimal_1 order by t POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### -NULL 1970-01-01 00:00:17.29 +NULL PREHOOK: query: drop table decimal_1 PREHOOK: type: DROPTABLE PREHOOK: Input: default@decimal_1 diff --git ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out index 3170625d2f..b61ffca111 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_10_0.q.out @@ -76,10 +76,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 
0:decimal(10,0) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2 Data size: 224 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -103,7 +102,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -147,8 +146,8 @@ POSTHOOK: query: SELECT `dec` FROM `DECIMAL` order by `dec` POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal #### A masked pattern was here #### -NULL 1000000000 +NULL PREHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT `dec` FROM `decimal_txt` order by `dec` PREHOOK: type: QUERY @@ -192,10 +191,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:decimal(10,0) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs @@ -219,7 +217,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -263,8 +261,8 @@ POSTHOOK: query: SELECT `dec` FROM `decimal_txt` order by `dec` POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_txt #### A masked pattern was here #### -NULL 1000000000 +NULL PREHOOK: query: DROP TABLE DECIMAL_txt PREHOOK: type: DROPTABLE PREHOOK: Input: default@decimal_txt diff --git ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out index 24873a46cc..c05f9fe69a 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_2.q.out @@ -65,10 +65,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:boolean native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -92,7 +91,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -181,10 +180,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: 
className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:tinyint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -208,7 +206,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -297,10 +295,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:smallint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -324,7 +321,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -413,10 +410,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -440,7 +436,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -529,10 +525,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -556,7 +551,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -645,10 +640,9 @@ 
STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:float native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -672,7 +666,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -761,10 +755,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -788,7 +781,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -877,10 +870,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -904,7 +896,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -1004,10 +996,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:boolean native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -1031,7 +1022,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + 
allNative: false usesVectorUDFAdaptor: false @@ -1120,10 +1111,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:tinyint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -1147,7 +1137,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -1236,10 +1226,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:smallint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -1263,7 +1252,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -1352,10 +1341,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -1379,7 +1367,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -1468,10 +1456,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -1495,7 +1482,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - 
reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -1584,10 +1571,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:float native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -1611,7 +1597,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -1700,10 +1686,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -1727,7 +1712,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -1816,10 +1801,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -1843,7 +1827,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false diff --git ql/src/test/results/clientpositive/llap/vector_decimal_3.q.out ql/src/test/results/clientpositive/llap/vector_decimal_3.q.out index 3e9a1ee909..b292c9a01b 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_3.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_3.q.out @@ -48,7 +48,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_3_n1 ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_3_n1 #### A masked pattern was here #### -NULL 0 -1234567890.123456789000000000 -1234567890 -4400.000000000000000000 4400 -1255.490000000000000000 -1255 @@ -86,6 +85,7 @@ NULL 0 125.200000000000000000 125 200.000000000000000000 200 1234567890.123456780000000000 
1234567890 +NULL 0 PREHOOK: query: SELECT * FROM DECIMAL_3_n1 ORDER BY key DESC, value DESC PREHOOK: type: QUERY PREHOOK: Input: default@decimal_3_n1 @@ -140,7 +140,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_3_n1 ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_3_n1 #### A masked pattern was here #### -NULL 0 -1234567890.123456789000000000 -1234567890 -4400.000000000000000000 4400 -1255.490000000000000000 -1255 @@ -178,6 +177,7 @@ NULL 0 125.200000000000000000 125 200.000000000000000000 200 1234567890.123456780000000000 1234567890 +NULL 0 PREHOOK: query: SELECT DISTINCT key FROM DECIMAL_3_n1 ORDER BY key PREHOOK: type: QUERY PREHOOK: Input: default@decimal_3_n1 @@ -186,7 +186,6 @@ POSTHOOK: query: SELECT DISTINCT key FROM DECIMAL_3_n1 ORDER BY key POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_3_n1 #### A masked pattern was here #### -NULL -1234567890.123456789000000000 -4400.000000000000000000 -1255.490000000000000000 @@ -215,6 +214,7 @@ NULL 125.200000000000000000 200.000000000000000000 1234567890.123456780000000000 +NULL PREHOOK: query: SELECT key, sum(value) FROM DECIMAL_3_n1 GROUP BY key ORDER BY key PREHOOK: type: QUERY PREHOOK: Input: default@decimal_3_n1 @@ -223,7 +223,6 @@ POSTHOOK: query: SELECT key, sum(value) FROM DECIMAL_3_n1 GROUP BY key ORDER BY POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_3_n1 #### A masked pattern was here #### -NULL 0 -1234567890.123456789000000000 -1234567890 -4400.000000000000000000 4400 -1255.490000000000000000 -1255 @@ -252,6 +251,7 @@ NULL 0 125.200000000000000000 125 200.000000000000000000 200 1234567890.123456780000000000 1234567890 +NULL 0 PREHOOK: query: SELECT value, sum(key) FROM DECIMAL_3_n1 GROUP BY value ORDER BY value PREHOOK: type: QUERY PREHOOK: Input: default@decimal_3_n1 diff --git ql/src/test/results/clientpositive/llap/vector_decimal_4.q.out ql/src/test/results/clientpositive/llap/vector_decimal_4.q.out index d365fb99ad..fc18645663 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_4.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_4.q.out @@ -56,7 +56,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_4_1_n0 ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_4_1_n0 #### A masked pattern was here #### -NULL 0 -1234567890.1234567890000000000000000 -1234567890 -4400.0000000000000000000000000 4400 -1255.4900000000000000000000000 -1255 @@ -94,6 +93,7 @@ NULL 0 125.2000000000000000000000000 125 200.0000000000000000000000000 200 1234567890.1234567800000000000000000 1234567890 +NULL 0 PREHOOK: query: SELECT * FROM DECIMAL_4_2_n0 ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@decimal_4_2_n0 @@ -102,7 +102,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_4_2_n0 ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_4_2_n0 #### A masked pattern was here #### -NULL NULL -1234567890.1234567890000000000000000 -3703703670.3703703670000000000000000 -4400.0000000000000000000000000 -13200.0000000000000000000000000 -1255.4900000000000000000000000 -3766.4700000000000000000000000 @@ -140,6 +139,7 @@ NULL NULL 125.2000000000000000000000000 375.6000000000000000000000000 200.0000000000000000000000000 600.0000000000000000000000000 1234567890.1234567800000000000000000 3703703670.3703703400000000000000000 +NULL NULL PREHOOK: query: SELECT * FROM DECIMAL_4_2_n0 ORDER BY key PREHOOK: type: QUERY PREHOOK: Input: default@decimal_4_2_n0 @@ -148,7 +148,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_4_2_n0 ORDER BY key POSTHOOK: type: QUERY 
POSTHOOK: Input: default@decimal_4_2_n0 #### A masked pattern was here #### -NULL NULL -1234567890.1234567890000000000000000 -3703703670.3703703670000000000000000 -4400.0000000000000000000000000 -13200.0000000000000000000000000 -1255.4900000000000000000000000 -3766.4700000000000000000000000 @@ -186,6 +185,7 @@ NULL NULL 125.2000000000000000000000000 375.6000000000000000000000000 200.0000000000000000000000000 600.0000000000000000000000000 1234567890.1234567800000000000000000 3703703670.3703703400000000000000000 +NULL NULL PREHOOK: query: SELECT * FROM DECIMAL_4_2_n0 ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@decimal_4_2_n0 @@ -194,7 +194,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_4_2_n0 ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_4_2_n0 #### A masked pattern was here #### -NULL NULL -1234567890.1234567890000000000000000 -3703703670.3703703670000000000000000 -4400.0000000000000000000000000 -13200.0000000000000000000000000 -1255.4900000000000000000000000 -3766.4700000000000000000000000 @@ -232,6 +231,7 @@ NULL NULL 125.2000000000000000000000000 375.6000000000000000000000000 200.0000000000000000000000000 600.0000000000000000000000000 1234567890.1234567800000000000000000 3703703670.3703703400000000000000000 +NULL NULL PREHOOK: query: DROP TABLE DECIMAL_4_1_n0 PREHOOK: type: DROPTABLE PREHOOK: Input: default@decimal_4_1_n0 diff --git ql/src/test/results/clientpositive/llap/vector_decimal_5.q.out ql/src/test/results/clientpositive/llap/vector_decimal_5.q.out index 5184b59672..585dab4151 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_5.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_5.q.out @@ -56,9 +56,6 @@ POSTHOOK: query: SELECT key FROM DECIMAL_5 ORDER BY key POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_5 #### A masked pattern was here #### -NULL -NULL -NULL -4400.00000 -1255.49000 -1.12200 @@ -94,6 +91,9 @@ NULL 124.00000 125.20000 200.00000 +NULL +NULL +NULL PREHOOK: query: SELECT DISTINCT key FROM DECIMAL_5 ORDER BY key PREHOOK: type: QUERY PREHOOK: Input: default@decimal_5 @@ -102,7 +102,6 @@ POSTHOOK: query: SELECT DISTINCT key FROM DECIMAL_5 ORDER BY key POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_5 #### A masked pattern was here #### -NULL -4400.00000 -1255.49000 -1.12200 @@ -129,6 +128,7 @@ NULL 124.00000 125.20000 200.00000 +NULL PREHOOK: query: explain SELECT cast(key as decimal) FROM DECIMAL_5 PREHOOK: type: QUERY POSTHOOK: query: explain SELECT cast(key as decimal) FROM DECIMAL_5 diff --git ql/src/test/results/clientpositive/llap/vector_decimal_6.q.out ql/src/test/results/clientpositive/llap/vector_decimal_6.q.out index 8607eed001..1a35803207 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_6.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_6.q.out @@ -149,10 +149,9 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:decimal(10,5), 1:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 27 Data size: 2684 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -176,7 +175,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: 
hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: zz reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -220,11 +219,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_6_1 ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_6_1 #### A masked pattern was here #### -NULL -1234567890 -NULL 0 -NULL 3 -NULL 4 -NULL 1234567890 -4400.00000 4400 -1255.49000 -1255 -1.12200 -11 @@ -247,6 +241,11 @@ NULL 1234567890 124.00000 124 125.20000 125 23232.23435 2 +NULL -1234567890 +NULL 0 +NULL 3 +NULL 4 +NULL 1234567890 PREHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT * FROM DECIMAL_6_2 ORDER BY key, value PREHOOK: type: QUERY @@ -290,10 +289,9 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:decimal(17,4), 1:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 27 Data size: 3132 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -317,7 +315,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: zz reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -361,7 +359,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_6_2 ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_6_2 #### A masked pattern was here #### -NULL 0 -1234567890.1235 -1234567890 -4400.0000 4400 -1255.4900 -1255 @@ -388,6 +385,7 @@ NULL 0 2389432.2375 3 2389432.2375 4 1234567890.1235 1234567890 +NULL 0 PREHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT T.key from ( SELECT key, value from DECIMAL_6_1 @@ -442,10 +440,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:decimal(18,5) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 54 Data size: 5600 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -486,10 +483,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:decimal(18,5) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 54 Data size: 5600 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -513,7 +509,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + 
reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -569,12 +565,6 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_6_1 POSTHOOK: Input: default@decimal_6_2 #### A masked pattern was here #### -NULL -NULL -NULL -NULL -NULL -NULL -1234567890.12350 -4400.00000 -4400.00000 @@ -623,6 +613,12 @@ NULL 2389432.23750 2389432.23750 1234567890.12350 +NULL +NULL +NULL +NULL +NULL +NULL PREHOOK: query: EXPLAIN VECTORIZATION DETAIL CREATE TABLE DECIMAL_6_3 STORED AS ORC AS SELECT key + 5.5 AS k, value * 11 AS v from DECIMAL_6_1 ORDER BY v PREHOOK: type: CREATETABLE_AS_SELECT @@ -670,10 +666,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4] + keyColumns: 4:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:decimal(11,5) Statistics: Num rows: 27 Data size: 3132 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(11,5)) Execution mode: vectorized, llap @@ -698,7 +694,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -779,11 +775,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_6_3 ORDER BY k, v POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_6_3 #### A masked pattern was here #### -NULL -695344902 -NULL 0 -NULL 33 -NULL 44 -NULL 695344902 -4394.50000 48400 -1249.99000 -13805 4.37800 -121 @@ -806,3 +797,8 @@ NULL 695344902 129.50000 1364 130.70000 1375 23237.73435 22 +NULL -695344902 +NULL 0 +NULL 33 +NULL 44 +NULL 695344902 diff --git ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out index 30a6770868..ef94587ca8 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_aggregate.q.out @@ -100,10 +100,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2, 3, 4, 5, 6, 7, 8, 9] + valueColumns: 1:bigint, 2:decimal(20,10), 3:decimal(20,10), 4:decimal(30,10), 5:bigint, 6:decimal(23,14), 7:decimal(23,14), 8:decimal(33,14), 9:bigint Statistics: Num rows: 12289 Data size: 2662128 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: bigint), _col6 (type: decimal(23,14)), _col7 (type: decimal(23,14)), _col8 (type: decimal(33,14)), _col9 (type: bigint) Execution mode: vectorized, llap @@ -280,10 +280,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - 
keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13] + valueColumns: 1:bigint, 2:decimal(20,10), 3:decimal(20,10), 4:decimal(30,10), 5:double, 6:double, 7:bigint, 8:decimal(23,14), 9:decimal(23,14), 10:decimal(33,14), 11:double, 12:double, 13:bigint Statistics: Num rows: 12289 Data size: 2662128 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: double), _col6 (type: double), _col7 (type: bigint), _col8 (type: decimal(23,14)), _col9 (type: decimal(23,14)), _col10 (type: decimal(33,14)), _col11 (type: double), _col12 (type: double), _col13 (type: bigint) Execution mode: vectorized, llap @@ -494,10 +494,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2, 3, 4, 5, 6, 7, 8, 9] + valueColumns: 1:bigint, 2:decimal(11,5), 3:decimal(11,5), 4:decimal(21,5), 5:bigint, 6:decimal(16,0), 7:decimal(16,0), 8:decimal(26,0), 9:bigint Statistics: Num rows: 12289 Data size: 2662128 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint), _col2 (type: decimal(11,5)), _col3 (type: decimal(11,5)), _col4 (type: decimal(21,5)), _col5 (type: bigint), _col6 (type: decimal(16,0)), _col7 (type: decimal(16,0)), _col8 (type: decimal(26,0)), _col9 (type: bigint) Execution mode: vectorized, llap @@ -693,10 +693,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13] + valueColumns: 1:bigint, 2:decimal(11,5), 3:decimal(11,5), 4:decimal(21,5), 5:double, 6:double, 7:bigint, 8:decimal(16,0), 9:decimal(16,0), 10:decimal(26,0), 11:double, 12:double, 13:bigint Statistics: Num rows: 12289 Data size: 2662128 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint), _col2 (type: decimal(11,5)), _col3 (type: decimal(11,5)), _col4 (type: decimal(21,5)), _col5 (type: double), _col6 (type: double), _col7 (type: bigint), _col8 (type: decimal(16,0)), _col9 (type: decimal(16,0)), _col10 (type: decimal(26,0)), _col11 (type: double), _col12 (type: double), _col13 (type: bigint) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out index a7b2714ae6..9ffde7d6f2 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out +++ 
ql/src/test/results/clientpositive/llap/vector_decimal_expressions.q.out @@ -84,10 +84,9 @@ STAGE PLANS: sort order: ++++++++++++++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4, 6, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20] + keyColumns: 4:decimal(25,14), 6:decimal(26,14), 8:decimal(38,13), 10:decimal(38,17), 11:decimal(12,10), 12:int, 13:smallint, 14:tinyint, 15:bigint, 16:boolean, 17:double, 18:float, 19:string, 20:timestamp native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 455 Data size: 100294 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap @@ -112,7 +111,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aaaaaaaaaaaaaa + reduceColumnNullOrder: zzzzzzzzzzzzzz reduceColumnSortOrder: ++++++++++++++ allNative: false usesVectorUDFAdaptor: false @@ -258,10 +257,9 @@ STAGE PLANS: sort order: ++++++++++++++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [6, 8, 10, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22] + keyColumns: 6:decimal(11,3), 8:decimal(11,3), 10:decimal(21,11), 12:decimal(23,9), 13:decimal(5,3), 14:int, 15:smallint, 16:tinyint, 17:bigint, 18:boolean, 19:double, 20:float, 21:string, 22:timestamp native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 455 Data size: 100294 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap @@ -286,7 +284,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aaaaaaaaaaaaaa + reduceColumnNullOrder: zzzzzzzzzzzzzz reduceColumnSortOrder: ++++++++++++++ allNative: false usesVectorUDFAdaptor: false diff --git ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out index b66fb9fca6..aaab5923ba 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out @@ -195,10 +195,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(26,2)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:decimal(26,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 997 Data size: 106235 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -472,10 +471,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(26,2)) Reduce Sink 
Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:decimal(26,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:decimal(24,0) Statistics: Num rows: 997 Data size: 212470 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: decimal(24,0)) Execution mode: vectorized, llap @@ -826,10 +825,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(16,2)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:decimal(16,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 997 Data size: 106235 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs @@ -1103,10 +1101,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(16,2)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:decimal(16,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:decimal(14,0) Statistics: Num rows: 997 Data size: 212470 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: decimal(14,0)) Execution mode: vectorized, llap @@ -1382,10 +1380,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(16,2)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:decimal(16,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 997 Data size: 106235 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs @@ -1661,10 +1658,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(16,2)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:decimal(16,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:decimal(14,0) Statistics: Num rows: 997 Data size: 212470 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: decimal(14,0)) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out index 5e7e8cacef..5827587f05 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out +++ 
ql/src/test/results/clientpositive/llap/vector_decimal_precision.q.out @@ -59,6 +59,37 @@ POSTHOOK: query: SELECT * FROM DECIMAL_PRECISION ORDER BY `dec` POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_precision #### A masked pattern was here #### +0.0000000000 +0.0000000000 +0.0000000000 +0.0000000000 +0.0000000000 +0.1234567890 +0.1234567890 +1.2345678901 +1.2345678901 +1.2345678901 +12.3456789012 +12.3456789012 +12.3456789012 +123.4567890123 +123.4567890123 +123.4567890123 +1234.5678901235 +1234.5678901235 +1234.5678901235 +12345.6789012346 +12345.6789012346 +123456.7890123456 +123456.7890123457 +1234567.8901234560 +1234567.8901234568 +12345678.9012345600 +12345678.9012345679 +123456789.0123456000 +123456789.0123456789 +1234567890.1234560000 +1234567890.1234567890 NULL NULL NULL @@ -103,37 +134,6 @@ NULL NULL NULL NULL -0.0000000000 -0.0000000000 -0.0000000000 -0.0000000000 -0.0000000000 -0.1234567890 -0.1234567890 -1.2345678901 -1.2345678901 -1.2345678901 -12.3456789012 -12.3456789012 -12.3456789012 -123.4567890123 -123.4567890123 -123.4567890123 -1234.5678901235 -1234.5678901235 -1234.5678901235 -12345.6789012346 -12345.6789012346 -123456.7890123456 -123456.7890123457 -1234567.8901234560 -1234567.8901234568 -12345678.9012345600 -12345678.9012345679 -123456789.0123456000 -123456789.0123456789 -1234567890.1234560000 -1234567890.1234567890 PREHOOK: query: SELECT `dec`, `dec` + 1, `dec` - 1 FROM DECIMAL_PRECISION ORDER BY `dec` PREHOOK: type: QUERY PREHOOK: Input: default@decimal_precision @@ -142,50 +142,6 @@ POSTHOOK: query: SELECT `dec`, `dec` + 1, `dec` - 1 FROM DECIMAL_PRECISION ORDER POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_precision #### A masked pattern was here #### -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL 0.0000000000 1.0000000000 -1.0000000000 0.0000000000 1.0000000000 -1.0000000000 0.0000000000 1.0000000000 -1.0000000000 @@ -217,14 +173,6 @@ NULL NULL NULL 123456789.0123456789 123456790.0123456789 123456788.0123456789 1234567890.1234560000 1234567891.1234560000 1234567889.1234560000 1234567890.1234567890 1234567891.1234567890 1234567889.1234567890 -PREHOOK: query: SELECT `dec`, `dec` * 2, `dec` / 3 FROM DECIMAL_PRECISION ORDER BY `dec` -PREHOOK: type: QUERY -PREHOOK: Input: default@decimal_precision -#### A masked pattern was here #### -POSTHOOK: query: SELECT `dec`, `dec` * 2, `dec` / 3 FROM DECIMAL_PRECISION ORDER BY `dec` -POSTHOOK: type: QUERY -POSTHOOK: Input: default@decimal_precision -#### A masked pattern was here #### NULL NULL NULL NULL NULL NULL NULL NULL NULL @@ -269,6 +217,14 @@ NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: SELECT `dec`, `dec` * 2, `dec` / 3 FROM DECIMAL_PRECISION ORDER BY `dec` +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT `dec`, `dec` * 2, `dec` / 3 FROM 
DECIMAL_PRECISION ORDER BY `dec` +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### 0.0000000000 0.0000000000 0.000000000000 0.0000000000 0.0000000000 0.000000000000 0.0000000000 0.0000000000 0.000000000000 @@ -300,6 +256,50 @@ NULL NULL NULL 123456789.0123456789 246913578.0246913578 41152263.004115226300 1234567890.1234560000 2469135780.2469120000 411522630.041152000000 1234567890.1234567890 2469135780.2469135780 411522630.041152263000 +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL PREHOOK: query: SELECT `dec`, `dec` / 9 FROM DECIMAL_PRECISION ORDER BY `dec` PREHOOK: type: QUERY PREHOOK: Input: default@decimal_precision @@ -308,50 +308,6 @@ POSTHOOK: query: SELECT `dec`, `dec` / 9 FROM DECIMAL_PRECISION ORDER BY `dec` POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_precision #### A masked pattern was here #### -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL 0.0000000000 0.000000000000 0.0000000000 0.000000000000 0.0000000000 0.000000000000 @@ -383,14 +339,6 @@ NULL NULL 123456789.0123456789 13717421.001371742100 1234567890.1234560000 137174210.013717333333 1234567890.1234567890 137174210.013717421000 -PREHOOK: query: SELECT `dec`, `dec` / 27 FROM DECIMAL_PRECISION ORDER BY `dec` -PREHOOK: type: QUERY -PREHOOK: Input: default@decimal_precision -#### A masked pattern was here #### -POSTHOOK: query: SELECT `dec`, `dec` / 27 FROM DECIMAL_PRECISION ORDER BY `dec` -POSTHOOK: type: QUERY -POSTHOOK: Input: default@decimal_precision -#### A masked pattern was here #### NULL NULL NULL NULL NULL NULL @@ -435,6 +383,14 @@ NULL NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: SELECT `dec`, `dec` / 27 FROM DECIMAL_PRECISION ORDER BY `dec` +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT `dec`, `dec` / 27 FROM DECIMAL_PRECISION ORDER BY `dec` +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### 0.0000000000 0.0000000000000 0.0000000000 0.0000000000000 0.0000000000 0.0000000000000 @@ -466,14 +422,6 @@ NULL NULL 123456789.0123456789 4572473.6671239140333 1234567890.1234560000 45724736.6712391111111 1234567890.1234567890 45724736.6712391403333 -PREHOOK: query: SELECT `dec`, `dec` * `dec` FROM DECIMAL_PRECISION ORDER BY `dec` -PREHOOK: type: QUERY -PREHOOK: Input: default@decimal_precision -#### A masked pattern was here #### -POSTHOOK: query: 
SELECT `dec`, `dec` * `dec` FROM DECIMAL_PRECISION ORDER BY `dec` -POSTHOOK: type: QUERY -POSTHOOK: Input: default@decimal_precision -#### A masked pattern was here #### NULL NULL NULL NULL NULL NULL @@ -518,6 +466,14 @@ NULL NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: SELECT `dec`, `dec` * `dec` FROM DECIMAL_PRECISION ORDER BY `dec` +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT `dec`, `dec` * `dec` FROM DECIMAL_PRECISION ORDER BY `dec` +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### 0.0000000000 0.00000000000000000 0.0000000000 0.00000000000000000 0.0000000000 0.00000000000000000 @@ -549,6 +505,50 @@ NULL NULL 123456789.0123456789 15241578753238836.75019051998750191 1234567890.1234560000 1524157875323881726.87092138393600000 1234567890.1234567890 1524157875323883675.01905199875019052 +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL PREHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT avg(`dec`), sum(`dec`) FROM DECIMAL_PRECISION PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT avg(`dec`), sum(`dec`) FROM DECIMAL_PRECISION @@ -601,10 +601,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1] + valueColumns: 0:decimal(30,10), 1:bigint Statistics: Num rows: 1 Data size: 120 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(30,10)), _col1 (type: bigint) Execution mode: vectorized, llap @@ -761,50 +760,6 @@ POSTHOOK: query: SELECT `dec`, `dec` + 1, `dec` - 1 FROM DECIMAL_PRECISION_txt_s POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_precision_txt_small #### A masked pattern was here #### -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL 0.0000000000 1.0000000000 -1.0000000000 0.0000000000 1.0000000000 -1.0000000000 0.0000000000 1.0000000000 -1.0000000000 @@ -836,14 +791,6 @@ NULL NULL NULL 123456789.0123456789 123456790.0123456789 123456788.0123456789 1234567890.1234560000 1234567891.1234560000 1234567889.1234560000 1234567890.1234567890 1234567891.1234567890 
1234567889.1234567890 -PREHOOK: query: SELECT `dec`, `dec` * 2, `dec` / 3 FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` -PREHOOK: type: QUERY -PREHOOK: Input: default@decimal_precision_txt_small -#### A masked pattern was here #### -POSTHOOK: query: SELECT `dec`, `dec` * 2, `dec` / 3 FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` -POSTHOOK: type: QUERY -POSTHOOK: Input: default@decimal_precision_txt_small -#### A masked pattern was here #### NULL NULL NULL NULL NULL NULL NULL NULL NULL @@ -888,6 +835,14 @@ NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: SELECT `dec`, `dec` * 2, `dec` / 3 FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision_txt_small +#### A masked pattern was here #### +POSTHOOK: query: SELECT `dec`, `dec` * 2, `dec` / 3 FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision_txt_small +#### A masked pattern was here #### 0.0000000000 0.0000000000 0.000000000000 0.0000000000 0.0000000000 0.000000000000 0.0000000000 0.0000000000 0.000000000000 @@ -919,6 +874,50 @@ NULL NULL NULL 123456789.0123456789 246913578.0246913578 41152263.004115226300 1234567890.1234560000 2469135780.2469120000 411522630.041152000000 1234567890.1234567890 2469135780.2469135780 411522630.041152263000 +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL PREHOOK: query: SELECT `dec`, `dec` / 9 FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` PREHOOK: type: QUERY PREHOOK: Input: default@decimal_precision_txt_small @@ -927,50 +926,6 @@ POSTHOOK: query: SELECT `dec`, `dec` / 9 FROM DECIMAL_PRECISION_txt_small ORDER POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_precision_txt_small #### A masked pattern was here #### -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL 0.0000000000 0.000000000000 0.0000000000 0.000000000000 0.0000000000 0.000000000000 @@ -1002,14 +957,6 @@ NULL NULL 123456789.0123456789 13717421.001371742100 1234567890.1234560000 137174210.013717333333 1234567890.1234567890 137174210.013717421000 -PREHOOK: query: SELECT `dec`, `dec` / 27 FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` -PREHOOK: type: QUERY -PREHOOK: Input: default@decimal_precision_txt_small -#### A masked pattern was here #### -POSTHOOK: query: SELECT `dec`, `dec` / 27 FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` -POSTHOOK: type: QUERY -POSTHOOK: Input: default@decimal_precision_txt_small -#### A masked 
pattern was here #### NULL NULL NULL NULL NULL NULL @@ -1054,6 +1001,14 @@ NULL NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: SELECT `dec`, `dec` / 27 FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision_txt_small +#### A masked pattern was here #### +POSTHOOK: query: SELECT `dec`, `dec` / 27 FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision_txt_small +#### A masked pattern was here #### 0.0000000000 0.0000000000000 0.0000000000 0.0000000000000 0.0000000000 0.0000000000000 @@ -1085,14 +1040,6 @@ NULL NULL 123456789.0123456789 4572473.6671239140333 1234567890.1234560000 45724736.6712391111111 1234567890.1234567890 45724736.6712391403333 -PREHOOK: query: SELECT `dec`, `dec` * `dec` FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` -PREHOOK: type: QUERY -PREHOOK: Input: default@decimal_precision_txt_small -#### A masked pattern was here #### -POSTHOOK: query: SELECT `dec`, `dec` * `dec` FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` -POSTHOOK: type: QUERY -POSTHOOK: Input: default@decimal_precision_txt_small -#### A masked pattern was here #### NULL NULL NULL NULL NULL NULL @@ -1137,6 +1084,14 @@ NULL NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: SELECT `dec`, `dec` * `dec` FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision_txt_small +#### A masked pattern was here #### +POSTHOOK: query: SELECT `dec`, `dec` * `dec` FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision_txt_small +#### A masked pattern was here #### 0.0000000000 0.00000000000000000 0.0000000000 0.00000000000000000 0.0000000000 0.00000000000000000 @@ -1168,6 +1123,50 @@ NULL NULL 123456789.0123456789 15241578753238836.75019051998750191 1234567890.1234560000 1524157875323881726.87092138393600000 1234567890.1234567890 1524157875323883675.01905199875019052 +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL PREHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT avg(`dec`), sum(`dec`) FROM DECIMAL_PRECISION_txt_small PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT avg(`dec`), sum(`dec`) FROM DECIMAL_PRECISION_txt_small @@ -1220,10 +1219,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1] + valueColumns: 0:decimal(30,10), 1:bigint Statistics: Num rows: 1 Data size: 232 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: decimal(30,10)), _col1 (type: bigint) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out index 
eb4a5888fe..4f6a2ccb5e 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_round.q.out @@ -74,10 +74,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:decimal(10,0) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:decimal(11,0) Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(11,0)) Execution mode: vectorized, llap @@ -102,7 +102,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -191,10 +191,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:decimal(11,0) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:decimal(10,0) Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(10,0)) Execution mode: vectorized, llap @@ -219,7 +219,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -336,10 +336,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:decimal(10,0) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumns: 2:decimal(11,0) Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(11,0)) Execution mode: vectorized, llap @@ -364,7 +364,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -453,10 +453,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumns: 2:decimal(11,0) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + 
valueColumns: 0:decimal(10,0) Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(10,0)) Execution mode: vectorized, llap @@ -481,7 +481,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -598,10 +598,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:decimal(10,0) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:decimal(11,0) Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(11,0)) Execution mode: vectorized, llap @@ -626,7 +626,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -715,10 +715,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:decimal(11,0) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:decimal(10,0) Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(10,0)) Execution mode: vectorized, llap @@ -743,7 +743,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false diff --git ql/src/test/results/clientpositive/llap/vector_decimal_round_2.q.out ql/src/test/results/clientpositive/llap/vector_decimal_round_2.q.out index bb0cbfc225..ac9afeb510 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_round_2.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_round_2.q.out @@ -78,10 +78,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumns: 2:decimal(21,0) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] + valueColumns: 3:decimal(21,0), 4:decimal(22,1), 5:decimal(23,2), 6:decimal(24,3), 7:decimal(21,0), 8:decimal(21,0), 9:decimal(21,0), 10:decimal(21,0), 11:decimal(21,0), 12:decimal(21,0), 13:decimal(21,0), 14:decimal(21,0) Statistics: Num rows: 1 Data size: 1456 Basic 
stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(21,0)), _col2 (type: decimal(22,1)), _col3 (type: decimal(23,2)), _col4 (type: decimal(24,3)), _col5 (type: decimal(21,0)), _col6 (type: decimal(21,0)), _col7 (type: decimal(21,0)), _col8 (type: decimal(21,0)), _col9 (type: decimal(21,0)), _col10 (type: decimal(21,0)), _col11 (type: decimal(21,0)), _col12 (type: decimal(21,0)) Execution mode: vectorized, llap @@ -106,7 +106,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -246,10 +246,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:decimal(21,0) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22] + valueColumns: 4:decimal(21,0), 5:decimal(22,1), 6:decimal(23,2), 7:decimal(24,3), 8:decimal(25,4), 9:decimal(21,0), 10:decimal(21,0), 11:decimal(21,0), 12:decimal(21,0), 13:decimal(21,0), 14:decimal(21,0), 15:decimal(22,1), 16:decimal(23,2), 17:decimal(24,3), 18:decimal(25,4), 19:decimal(21,0), 20:decimal(21,0), 21:decimal(21,0), 22:decimal(21,0) Statistics: Num rows: 1 Data size: 2240 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(21,0)), _col2 (type: decimal(22,1)), _col3 (type: decimal(23,2)), _col4 (type: decimal(24,3)), _col5 (type: decimal(25,4)), _col6 (type: decimal(21,0)), _col7 (type: decimal(21,0)), _col8 (type: decimal(21,0)), _col9 (type: decimal(21,0)), _col10 (type: decimal(21,0)), _col11 (type: decimal(21,0)), _col12 (type: decimal(22,1)), _col13 (type: decimal(23,2)), _col14 (type: decimal(24,3)), _col15 (type: decimal(25,4)), _col16 (type: decimal(21,0)), _col17 (type: decimal(21,0)), _col18 (type: decimal(21,0)), _col19 (type: decimal(21,0)) Execution mode: vectorized, llap @@ -274,7 +274,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -441,10 +441,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumns: 2:decimal(21,0) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34] + valueColumns: 3:decimal(21,0), 4:decimal(21,0), 5:decimal(21,0), 6:decimal(21,0), 7:decimal(21,0), 8:decimal(21,0), 9:decimal(21,0), 10:decimal(21,0), 11:decimal(21,0), 12:decimal(21,0), 13:decimal(21,0), 14:decimal(21,0), 15:decimal(21,0), 16:decimal(21,0), 17:decimal(21,0), 18:decimal(21,0), 19:decimal(22,1), 
20:decimal(23,2), 21:decimal(24,3), 22:decimal(25,4), 23:decimal(26,5), 24:decimal(27,6), 25:decimal(28,7), 26:decimal(29,8), 27:decimal(30,9), 28:decimal(31,10), 29:decimal(32,11), 30:decimal(33,12), 31:decimal(34,13), 32:decimal(35,14), 33:decimal(36,15), 34:decimal(37,16) Statistics: Num rows: 1 Data size: 3808 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(21,0)), _col2 (type: decimal(21,0)), _col3 (type: decimal(21,0)), _col4 (type: decimal(21,0)), _col5 (type: decimal(21,0)), _col6 (type: decimal(21,0)), _col7 (type: decimal(21,0)), _col8 (type: decimal(21,0)), _col9 (type: decimal(21,0)), _col10 (type: decimal(21,0)), _col11 (type: decimal(21,0)), _col12 (type: decimal(21,0)), _col13 (type: decimal(21,0)), _col14 (type: decimal(21,0)), _col15 (type: decimal(21,0)), _col16 (type: decimal(21,0)), _col17 (type: decimal(22,1)), _col18 (type: decimal(23,2)), _col19 (type: decimal(24,3)), _col20 (type: decimal(25,4)), _col21 (type: decimal(26,5)), _col22 (type: decimal(27,6)), _col23 (type: decimal(28,7)), _col24 (type: decimal(29,8)), _col25 (type: decimal(30,9)), _col26 (type: decimal(31,10)), _col27 (type: decimal(32,11)), _col28 (type: decimal(33,12)), _col29 (type: decimal(34,13)), _col31 (type: decimal(35,14)), _col32 (type: decimal(36,15)), _col33 (type: decimal(37,16)) Execution mode: vectorized, llap @@ -469,7 +469,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -625,10 +625,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3] + keyColumns: 3:decimal(30,9) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [4] + valueColumns: 4:decimal(30,9) Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(30,9)) Execution mode: vectorized, llap @@ -653,7 +653,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false diff --git ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out index a35f6fe847..44b3ff70e8 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_trailing.q.out @@ -108,10 +108,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2] + valueColumns: 1:decimal(10,4), 2:decimal(15,8) Statistics: Num rows: 30 Data size: 4936 Basic stats: 
COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(10,4)), _col2 (type: decimal(15,8)) Execution mode: vectorized, llap @@ -136,7 +136,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false diff --git ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out index f3d2e6f619..859aeba5b9 100644 --- ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out +++ ql/src/test/results/clientpositive/llap/vector_decimal_udf.q.out @@ -2320,10 +2320,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2] + valueColumns: 1:decimal(30,10), 2:bigint Statistics: Num rows: 18 Data size: 2232 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(30,10)), _col2 (type: bigint) Execution mode: vectorized, llap @@ -2387,10 +2387,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [4, 6, 1] + valueColumns: 4:decimal(38,18), 6:decimal(38,28), 1:decimal(30,10) Statistics: Num rows: 17 Data size: 5780 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(38,18)), _col2 (type: decimal(38,28)), _col3 (type: decimal(30,10)) Reducer 3 @@ -2398,7 +2398,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -3261,10 +3261,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2, 3] + valueColumns: 1:double, 2:double, 3:bigint Statistics: Num rows: 18 Data size: 504 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: bigint) Execution mode: vectorized, llap @@ -3425,10 +3425,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS 
true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2, 3] + valueColumns: 1:double, 2:double, 3:bigint Statistics: Num rows: 18 Data size: 504 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: bigint) Execution mode: vectorized, llap @@ -3668,10 +3668,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:decimal(20,10) Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(20,10)) Execution mode: vectorized, llap @@ -3799,10 +3798,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:decimal(20,10) Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(20,10)) Execution mode: vectorized, llap @@ -3930,10 +3928,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap @@ -6301,10 +6298,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2] + valueColumns: 1:decimal(25,3), 2:bigint Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: decimal(25,3)), _col2 (type: bigint) Execution mode: vectorized, llap @@ -6368,10 +6365,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [4, 6, 1] + valueColumns: 4:decimal(38,16), 6:decimal(38,26), 1:decimal(25,3) 
Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: decimal(38,16)), _col2 (type: decimal(38,26)), _col3 (type: decimal(25,3)) Reducer 3 @@ -6379,7 +6376,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -7242,10 +7239,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2, 3] + valueColumns: 1:double, 2:double, 3:bigint Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: bigint) Execution mode: vectorized, llap @@ -7406,10 +7403,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2, 3] + valueColumns: 1:double, 2:double, 3:bigint Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: bigint) Execution mode: vectorized, llap @@ -7649,10 +7646,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:decimal(15,3) Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: decimal(15,3)) Execution mode: vectorized, llap @@ -7780,10 +7776,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:decimal(15,3) Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: decimal(15,3)) Execution mode: vectorized, llap @@ -7911,10 +7906,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, 
BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 120 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vector_full_outer_join.q.out ql/src/test/results/clientpositive/llap/vector_full_outer_join.q.out new file mode 100644 index 0000000000..6ddcef6fd7 --- /dev/null +++ ql/src/test/results/clientpositive/llap/vector_full_outer_join.q.out @@ -0,0 +1,1228 @@ +PREHOOK: query: drop table if exists TJOIN1 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists TJOIN1 +POSTHOOK: type: DROPTABLE +PREHOOK: query: drop table if exists TJOIN2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists TJOIN2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists TJOIN1 (RNUM int , C1 int, C2 int) STORED AS orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@TJOIN1 +POSTHOOK: query: create table if not exists TJOIN1 (RNUM int , C1 int, C2 int) STORED AS orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@TJOIN1 +PREHOOK: query: create table if not exists TJOIN2 (RNUM int , C1 int, C2 char(2)) STORED AS orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@TJOIN2 +POSTHOOK: query: create table if not exists TJOIN2 (RNUM int , C1 int, C2 char(2)) STORED AS orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@TJOIN2 +PREHOOK: query: create table if not exists TJOIN1STAGE (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@TJOIN1STAGE +POSTHOOK: query: create table if not exists TJOIN1STAGE (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@TJOIN1STAGE +PREHOOK: query: create table if not exists TJOIN2STAGE (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@TJOIN2STAGE +POSTHOOK: query: create table if not exists TJOIN2STAGE (RNUM int , C1 int, C2 char(2)) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@TJOIN2STAGE +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/tjoin1.txt' OVERWRITE INTO TABLE TJOIN1STAGE +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@tjoin1stage +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/tjoin1.txt' OVERWRITE INTO TABLE TJOIN1STAGE +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@tjoin1stage +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/tjoin2.txt' OVERWRITE INTO TABLE TJOIN2STAGE +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@tjoin2stage +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/tjoin2.txt' OVERWRITE INTO TABLE TJOIN2STAGE +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: 
default@tjoin2stage +PREHOOK: query: INSERT INTO TABLE TJOIN1 SELECT * from TJOIN1STAGE +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1stage +PREHOOK: Output: default@tjoin1 +POSTHOOK: query: INSERT INTO TABLE TJOIN1 SELECT * from TJOIN1STAGE +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1stage +POSTHOOK: Output: default@tjoin1 +POSTHOOK: Lineage: tjoin1.c1 SIMPLE [(tjoin1stage)tjoin1stage.FieldSchema(name:c1, type:int, comment:null), ] +POSTHOOK: Lineage: tjoin1.c2 EXPRESSION [(tjoin1stage)tjoin1stage.FieldSchema(name:c2, type:char(2), comment:null), ] +POSTHOOK: Lineage: tjoin1.rnum SIMPLE [(tjoin1stage)tjoin1stage.FieldSchema(name:rnum, type:int, comment:null), ] +_col0 _col1 _col2 +PREHOOK: query: INSERT INTO TABLE TJOIN2 SELECT * from TJOIN2STAGE +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin2stage +PREHOOK: Output: default@tjoin2 +POSTHOOK: query: INSERT INTO TABLE TJOIN2 SELECT * from TJOIN2STAGE +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin2stage +POSTHOOK: Output: default@tjoin2 +POSTHOOK: Lineage: tjoin2.c1 SIMPLE [(tjoin2stage)tjoin2stage.FieldSchema(name:c1, type:int, comment:null), ] +POSTHOOK: Lineage: tjoin2.c2 SIMPLE [(tjoin2stage)tjoin2stage.FieldSchema(name:c2, type:char(2), comment:null), ] +POSTHOOK: Lineage: tjoin2.rnum SIMPLE [(tjoin2stage)tjoin2stage.FieldSchema(name:rnum, type:int, comment:null), ] +tjoin2stage.rnum tjoin2stage.c1 tjoin2stage.c2 +PREHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: tjoin1 + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: rnum (type: int), c1 (type: int), c2 (type: int) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: int), _col2 (type: int) + Execution mode: llap + LLAP IO: all inputs + Map 3 + Map Operator Tree: + TableScan + alias: tjoin2 + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: c1 (type: int), c2 (type: char(2)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: char(2)) + Execution mode: llap + LLAP IO: all inputs + Reducer 2 + 
Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col1 (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 7 Data size: 714 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 7 Data size: 714 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1 +PREHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +POSTHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1 +POSTHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c1j2 c2j2 +0 10 15 10 BB +0 10 15 10 FF +1 20 25 NULL NULL +2 NULL 50 NULL NULL +NULL NULL NULL 15 DD +NULL NULL NULL NULL EE +PREHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: tjoin1 + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: rnum (type: int), c1 (type: int), c2 (type: int) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: int), _col2 (type: int) + Execution mode: llap + LLAP IO: all inputs + Map 3 + Map Operator Tree: + TableScan + alias: tjoin2 + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: c1 (type: int), c2 (type: char(2)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: char(2)) + Execution mode: llap + LLAP 
IO: all inputs + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col1 (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 7 Data size: 714 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 7 Data size: 714 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1 +PREHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +POSTHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1 +POSTHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c1j2 c2j2 +0 10 15 10 BB +0 10 15 10 FF +1 20 25 NULL NULL +2 NULL 50 NULL NULL +NULL NULL NULL 15 DD +NULL NULL NULL NULL EE +PREHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: tjoin1 + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct] + Select Operator + expressions: rnum (type: int), c1 (type: int), c2 (type: int) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2] + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 1:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 0:int, 2:int + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: int), _col2 (type: int) + Execution 
mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: rnum:int, c1:int, c2:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 3 + Map Operator Tree: + TableScan + alias: tjoin2 + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:char(2), 3:ROW__ID:struct] + Select Operator + expressions: c1 (type: int), c2 (type: char(2)) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [1, 2] + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 1:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 2:char(2) + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: char(2)) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [1, 2] + dataColumns: rnum:int, c1:int, c2:char(2) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col1 (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 7 Data size: 714 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 7 Data size: 714 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1 +PREHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +POSTHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on 
( tjoin1.c1 = tjoin2.c1 ) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1 +POSTHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c1j2 c2j2 +0 10 15 10 BB +0 10 15 10 FF +1 20 25 NULL NULL +2 NULL 50 NULL NULL +NULL NULL NULL 15 DD +NULL NULL NULL NULL EE +PREHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: tjoin1 + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct] + Select Operator + expressions: rnum (type: int), c1 (type: int), c2 (type: int) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2] + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 1:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 0:int, 2:int + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: int), _col2 (type: int) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: rnum:int, c1:int, c2:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 3 + Map Operator Tree: + TableScan + alias: tjoin2 + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:char(2), 3:ROW__ID:struct] + Select Operator + expressions: c1 (type: int), c2 (type: char(2)) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [1, 2] + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: int) + sort 
order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 1:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 2:char(2) + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: char(2)) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [1, 2] + dataColumns: rnum:int, c1:int, c2:char(2) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col1 (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 7 Data size: 714 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 7 Data size: 714 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1 +PREHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +POSTHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1 +POSTHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c1j2 c2j2 +0 10 15 10 BB +0 10 15 10 FF +1 20 25 NULL NULL +2 NULL 50 NULL NULL +NULL NULL NULL 15 DD +NULL NULL NULL NULL EE +PREHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + 
alias: tjoin1 + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct] + Select Operator + expressions: rnum (type: int), c1 (type: int), c2 (type: int) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2] + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 1:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 0:int, 2:int + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: int), _col2 (type: int) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: rnum:int, c1:int, c2:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 3 + Map Operator Tree: + TableScan + alias: tjoin2 + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:char(2), 3:ROW__ID:struct] + Select Operator + expressions: c1 (type: int), c2 (type: char(2)) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [1, 2] + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 1:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 2:char(2) + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: char(2)) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [1, 2] + dataColumns: rnum:int, c1:int, c2:char(2) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 2 + Execution mode: llap + Reduce 
Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col1 (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 7 Data size: 714 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 7 Data size: 714 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1 +PREHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +POSTHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1 +POSTHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c1j2 c2j2 +0 10 15 10 BB +0 10 15 10 FF +1 20 25 NULL NULL +2 NULL 50 NULL NULL +NULL NULL NULL 15 DD +NULL NULL NULL NULL EE +PREHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: tjoin1 + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct] + Select Operator + expressions: rnum (type: int), c1 (type: int), c2 (type: int) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2] + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 1:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 0:int, 2:int + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: 
_col0 (type: int), _col2 (type: int) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: rnum:int, c1:int, c2:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 3 + Map Operator Tree: + TableScan + alias: tjoin2 + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:char(2), 3:ROW__ID:struct] + Select Operator + expressions: c1 (type: int), c2 (type: char(2)) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [1, 2] + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 1:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 2:char(2) + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: char(2)) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [1, 2] + dataColumns: rnum:int, c1:int, c2:char(2) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col1 (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 7 Data size: 714 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 7 Data size: 714 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1 +PREHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +POSTHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c1 as c1j2, 
tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1 +POSTHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c1j2 c2j2 +0 10 15 10 BB +0 10 15 10 FF +1 20 25 NULL NULL +2 NULL 50 NULL NULL +NULL NULL NULL 15 DD +NULL NULL NULL NULL EE +PREHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: tjoin1 + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct] + Select Operator + expressions: rnum (type: int), c1 (type: int), c2 (type: int) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2] + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 1:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 0:int, 2:int + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: int), _col2 (type: int) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: rnum:int, c1:int, c2:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 3 + Map Operator Tree: + TableScan + alias: tjoin2 + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:char(2), 3:ROW__ID:struct] + Select Operator + expressions: c1 (type: int), c2 (type: char(2)) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [1, 2] + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 
(type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 1:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 2:char(2) + Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col1 (type: char(2)) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [1, 2] + dataColumns: rnum:int, c1:int, c2:char(2) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col1 (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0, _col1, _col2, _col4 + Statistics: Num rows: 7 Data size: 686 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int), _col4 (type: char(2)) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 7 Data size: 686 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 7 Data size: 686 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1 +PREHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +POSTHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1 +POSTHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c2j2 +0 10 15 BB +0 10 15 FF +1 20 25 NULL +2 NULL 50 NULL +NULL NULL NULL DD +NULL NULL NULL EE +PREHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +select tjoin1.rnum, tjoin1.c1, tjoin1.c2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 
(SIMPLE_EDGE), Map 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: tjoin1 + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct] + Select Operator + expressions: rnum (type: int), c1 (type: int), c2 (type: int) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2] + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 1:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 0:int, 2:int + Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: int), _col2 (type: int) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: rnum:int, c1:int, c2:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 3 + Map Operator Tree: + TableScan + alias: tjoin2 + Statistics: Num rows: 4 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:char(2), 3:ROW__ID:struct] + Select Operator + expressions: c1 (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [1] + Statistics: Num rows: 4 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 1:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 4 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [1] + dataColumns: rnum:int, c1:int, c2:char(2) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 2 + Execution 
mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col1 (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 7 Data size: 84 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 7 Data size: 84 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +PREHOOK: type: QUERY +PREHOOK: Input: default@tjoin1 +PREHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +POSTHOOK: query: select tjoin1.rnum, tjoin1.c1, tjoin1.c2 from tjoin1 full outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 ) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@tjoin1 +POSTHOOK: Input: default@tjoin2 +#### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 +0 10 15 +0 10 15 +1 20 25 +2 NULL 50 +NULL NULL NULL +NULL NULL NULL diff --git ql/src/test/results/clientpositive/llap/vector_fullouter_mapjoin_1_fast.q.out ql/src/test/results/clientpositive/llap/vector_fullouter_mapjoin_1_fast.q.out new file mode 100644 index 0000000000..169d94cc93 --- /dev/null +++ ql/src/test/results/clientpositive/llap/vector_fullouter_mapjoin_1_fast.q.out @@ -0,0 +1,3945 @@ +PREHOOK: query: CREATE TABLE fullouter_long_big_1a_txt(key bigint) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1a_txt +POSTHOOK: query: CREATE TABLE fullouter_long_big_1a_txt(key bigint) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1a_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_big_1a_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_big_1a_txt +PREHOOK: query: CREATE TABLE fullouter_long_big_1a STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_long_big_1a_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1a +POSTHOOK: query: CREATE TABLE fullouter_long_big_1a STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_long_big_1a_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1a +POSTHOOK: Lineage: fullouter_long_big_1a.key SIMPLE [(fullouter_long_big_1a_txt)fullouter_long_big_1a_txt.FieldSchema(name:key, type:bigint, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_long_big_1a_nonull_txt(key bigint) +row format delimited fields terminated by ',' +PREHOOK: type: 
CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1a_nonull_txt +POSTHOOK: query: CREATE TABLE fullouter_long_big_1a_nonull_txt(key bigint) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1a_nonull_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_nonull_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_big_1a_nonull_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_nonull_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_big_1a_nonull_txt +PREHOOK: query: CREATE TABLE fullouter_long_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_nonull_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_long_big_1a_nonull_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1a_nonull +POSTHOOK: query: CREATE TABLE fullouter_long_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_nonull_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_long_big_1a_nonull_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1a_nonull +POSTHOOK: Lineage: fullouter_long_big_1a_nonull.key SIMPLE [(fullouter_long_big_1a_nonull_txt)fullouter_long_big_1a_nonull_txt.FieldSchema(name:key, type:bigint, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_long_small_1a_txt(key bigint, s_date date) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1a_txt +POSTHOOK: query: CREATE TABLE fullouter_long_small_1a_txt(key bigint, s_date date) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1a_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_small_1a_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_small_1a_txt +PREHOOK: query: CREATE TABLE fullouter_long_small_1a STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_long_small_1a_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1a +POSTHOOK: query: CREATE TABLE fullouter_long_small_1a STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_long_small_1a_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1a +POSTHOOK: Lineage: fullouter_long_small_1a.key SIMPLE [(fullouter_long_small_1a_txt)fullouter_long_small_1a_txt.FieldSchema(name:key, type:bigint, comment:null), ] +POSTHOOK: Lineage: fullouter_long_small_1a.s_date SIMPLE 
[(fullouter_long_small_1a_txt)fullouter_long_small_1a_txt.FieldSchema(name:s_date, type:date, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_long_small_1a_nonull_txt(key bigint, s_date date) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1a_nonull_txt +POSTHOOK: query: CREATE TABLE fullouter_long_small_1a_nonull_txt(key bigint, s_date date) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1a_nonull_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_nonull_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_small_1a_nonull_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_nonull_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_small_1a_nonull_txt +PREHOOK: query: CREATE TABLE fullouter_long_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_nonull_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_long_small_1a_nonull_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1a_nonull +POSTHOOK: query: CREATE TABLE fullouter_long_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_nonull_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_long_small_1a_nonull_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1a_nonull +POSTHOOK: Lineage: fullouter_long_small_1a_nonull.key SIMPLE [(fullouter_long_small_1a_nonull_txt)fullouter_long_small_1a_nonull_txt.FieldSchema(name:key, type:bigint, comment:null), ] +POSTHOOK: Lineage: fullouter_long_small_1a_nonull.s_date SIMPLE [(fullouter_long_small_1a_nonull_txt)fullouter_long_small_1a_nonull_txt.FieldSchema(name:s_date, type:date, comment:null), ] +PREHOOK: query: analyze table fullouter_long_big_1a compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a +PREHOOK: Output: default@fullouter_long_big_1a +POSTHOOK: query: analyze table fullouter_long_big_1a compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a +POSTHOOK: Output: default@fullouter_long_big_1a +PREHOOK: query: analyze table fullouter_long_big_1a compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_big_1a +PREHOOK: Output: default@fullouter_long_big_1a +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_big_1a compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_big_1a +POSTHOOK: Output: default@fullouter_long_big_1a +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_long_big_1a_nonull compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a_nonull +PREHOOK: Output: default@fullouter_long_big_1a_nonull +POSTHOOK: query: analyze table fullouter_long_big_1a_nonull compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a_nonull +POSTHOOK: Output: default@fullouter_long_big_1a_nonull +PREHOOK: query: analyze table fullouter_long_big_1a_nonull 
compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_big_1a_nonull +PREHOOK: Output: default@fullouter_long_big_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_big_1a_nonull compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_big_1a_nonull +POSTHOOK: Output: default@fullouter_long_big_1a_nonull +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_long_small_1a compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_small_1a +PREHOOK: Output: default@fullouter_long_small_1a +POSTHOOK: query: analyze table fullouter_long_small_1a compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_small_1a +POSTHOOK: Output: default@fullouter_long_small_1a +PREHOOK: query: analyze table fullouter_long_small_1a compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_small_1a +PREHOOK: Output: default@fullouter_long_small_1a +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_small_1a compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_small_1a +POSTHOOK: Output: default@fullouter_long_small_1a +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_long_small_1a_nonull compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_small_1a_nonull +PREHOOK: Output: default@fullouter_long_small_1a_nonull +POSTHOOK: query: analyze table fullouter_long_small_1a_nonull compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_small_1a_nonull +POSTHOOK: Output: default@fullouter_long_small_1a_nonull +PREHOOK: query: analyze table fullouter_long_small_1a_nonull compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_small_1a_nonull +PREHOOK: Output: default@fullouter_long_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_small_1a_nonull compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_small_1a_nonull +POSTHOOK: Output: default@fullouter_long_small_1a_nonull +#### A masked pattern was here #### +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:bigint, 1:ROW__ID:struct] + Select Operator + expressions: key (type: bigint) + outputColumnNames: _col0 + Select Vectorization: + className: 
VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Map-reduce partition columns: _col0 (type: bigint) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:bigint + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + includeColumns: [0] + dataColumns: key:bigint + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 4 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:bigint, 1:s_date:date, 2:ROW__ID:struct] + Select Operator + expressions: key (type: bigint), s_date (type: date) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Map-reduce partition columns: _col0 (type: bigint) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:bigint + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:date + Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: date) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:bigint, s_date:date + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col0 (type: bigint) + 1 _col0 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: date) + MergeJoin 
Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: z + reduceColumnSortOrder: + + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + dataColumns: KEY.reducesinkkey0:bigint, VALUE._col0:bigint, VALUE._col1:date + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: bigint), VALUE._col1 (type: date) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2] + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a +PREHOOK: Input: default@fullouter_long_small_1a +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a +POSTHOOK: Input: default@fullouter_long_small_1a +#### A masked pattern was here #### +-5206670856103795573 NULL NULL +-5310365297525168078 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-8460550397108077433 NULL NULL +1569543799237464101 NULL NULL +3313583664488247651 NULL NULL +968819023021777205 NULL NULL +NULL -1339636982994067311 2000-06-20 +NULL -1339636982994067311 2008-12-03 +NULL -2098090254092150988 1817-03-12 +NULL -2098090254092150988 2163-05-26 +NULL -2098090254092150988 2219-12-23 +NULL -2184423060953067642 1853-07-06 +NULL -2184423060953067642 1880-10-06 +NULL -2575185053386712613 1809-07-12 +NULL -2575185053386712613 2105-01-21 +NULL -2688622006344936758 1948-10-15 +NULL -2688622006344936758 2129-01-11 +NULL -327698348664467755 2222-10-15 +NULL -3655445881497026796 2108-08-16 +NULL -4224290881682877258 1813-05-17 +NULL -4224290881682877258 2120-01-16 +NULL -4224290881682877258 2185-07-08 +NULL -4961171400048338491 2196-08-10 +NULL -5706981533666803767 1800-09-20 +NULL -5706981533666803767 2151-06-09 +NULL -5754527700632192146 1958-07-15 +NULL -614848861623872247 2101-05-25 +NULL -614848861623872247 2112-11-09 +NULL -6784441713807772877 1845-02-16 +NULL -6784441713807772877 2054-06-17 +NULL -7707546703881534780 2134-08-20 +NULL 214451696109242839 1855-05-12 +NULL 214451696109242839 1977-01-04 +NULL 214451696109242839 2179-04-18 +NULL 2438535236662373438 1881-09-16 +NULL 2438535236662373438 1916-01-10 +NULL 
2438535236662373438 2026-06-23 +NULL 3845554233155411208 1805-11-10 +NULL 3845554233155411208 2264-04-05 +NULL 3873405809071478736 1918-11-20 +NULL 3873405809071478736 2034-06-09 +NULL 3873405809071478736 2164-04-23 +NULL 3905351789241845882 1866-07-28 +NULL 3905351789241845882 2045-12-05 +NULL 434940853096155515 2275-02-08 +NULL 4436884039838843341 2031-05-23 +NULL 5246983111579595707 1817-07-01 +NULL 5246983111579595707 2260-05-11 +NULL 5252407779338300447 2039-03-10 +NULL 5252407779338300447 2042-04-26 +NULL 6049335087268933751 2086-12-17 +NULL 6049335087268933751 2282-06-09 +NULL 7297177530102477725 1921-05-11 +NULL 7297177530102477725 1926-04-12 +NULL 7297177530102477725 2125-08-26 +NULL 7937120928560087303 2083-03-14 +NULL 8755921538765428593 1827-05-01 +NULL NULL 2024-01-23 +NULL NULL 2098-02-10 +NULL NULL 2242-02-08 +NULL NULL NULL +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:bigint, 1:ROW__ID:struct] + Select Operator + expressions: key (type: bigint) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Map-reduce partition columns: _col0 (type: bigint) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:bigint + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + includeColumns: [0] + dataColumns: key:bigint + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 2 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:bigint, 1:s_date:date, 
2:ROW__ID:struct] + Select Operator + expressions: key (type: bigint), s_date (type: date) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Map-reduce partition columns: _col0 (type: bigint) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:bigint + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:date + Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: date) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:bigint, s_date:date + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: a + reduceColumnSortOrder: + + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + dataColumns: KEY.reducesinkkey0:bigint, VALUE._col0:date + partitionColumnCount: 0 + scratchColumnTypeNames: [bigint] + Reduce Operator Tree: + Map Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: bigint) + 1 KEY.reducesinkkey0 (type: bigint) + Map Join Vectorization: + bigTableKeyColumns: 0:bigint + bigTableRetainColumnNums: [0, 1] + bigTableValueColumns: 0:bigint, 1:date + className: VectorMapJoinFullOuterLongOperator + fullOuterSmallTableKeyMapping: 0 -> 2 + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Fast Hash Table and No Hybrid Hash Join IS true + projectedOutput: 2:bigint, 0:bigint, 1:date + hashTableImplementationType: FAST + outputColumnNames: _col0, _col1, _col2 + input vertices: + 0 Map 1 + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + keyColumns: 2:bigint + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 0:bigint, 1:date + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE 
Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: date) + Reducer 4 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: z + reduceColumnSortOrder: + + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + dataColumns: KEY.reducesinkkey0:bigint, VALUE._col0:bigint, VALUE._col1:date + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: bigint), VALUE._col1 (type: date) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2] + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a +PREHOOK: Input: default@fullouter_long_small_1a +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a +POSTHOOK: Input: default@fullouter_long_small_1a +#### A masked pattern was here #### +-5206670856103795573 NULL NULL +-5310365297525168078 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-8460550397108077433 NULL NULL +1569543799237464101 NULL NULL +3313583664488247651 NULL NULL +968819023021777205 NULL NULL +NULL -1339636982994067311 2000-06-20 +NULL -1339636982994067311 2008-12-03 +NULL -2098090254092150988 1817-03-12 +NULL -2098090254092150988 2163-05-26 +NULL -2098090254092150988 2219-12-23 +NULL -2184423060953067642 1853-07-06 +NULL -2184423060953067642 1880-10-06 +NULL -2575185053386712613 1809-07-12 +NULL -2575185053386712613 2105-01-21 +NULL -2688622006344936758 1948-10-15 +NULL -2688622006344936758 2129-01-11 +NULL -327698348664467755 2222-10-15 +NULL -3655445881497026796 2108-08-16 +NULL -4224290881682877258 1813-05-17 +NULL -4224290881682877258 2120-01-16 +NULL -4224290881682877258 2185-07-08 +NULL -4961171400048338491 2196-08-10 +NULL -5706981533666803767 1800-09-20 +NULL -5706981533666803767 2151-06-09 +NULL -5754527700632192146 1958-07-15 +NULL -614848861623872247 2101-05-25 +NULL -614848861623872247 2112-11-09 +NULL -6784441713807772877 1845-02-16 +NULL -6784441713807772877 2054-06-17 +NULL -7707546703881534780 2134-08-20 +NULL 214451696109242839 1855-05-12 +NULL 214451696109242839 1977-01-04 +NULL 214451696109242839 2179-04-18 +NULL 2438535236662373438 1881-09-16 +NULL 2438535236662373438 1916-01-10 +NULL 2438535236662373438 
2026-06-23 +NULL 3845554233155411208 1805-11-10 +NULL 3845554233155411208 2264-04-05 +NULL 3873405809071478736 1918-11-20 +NULL 3873405809071478736 2034-06-09 +NULL 3873405809071478736 2164-04-23 +NULL 3905351789241845882 1866-07-28 +NULL 3905351789241845882 2045-12-05 +NULL 434940853096155515 2275-02-08 +NULL 4436884039838843341 2031-05-23 +NULL 5246983111579595707 1817-07-01 +NULL 5246983111579595707 2260-05-11 +NULL 5252407779338300447 2039-03-10 +NULL 5252407779338300447 2042-04-26 +NULL 6049335087268933751 2086-12-17 +NULL 6049335087268933751 2282-06-09 +NULL 7297177530102477725 1921-05-11 +NULL 7297177530102477725 1926-04-12 +NULL 7297177530102477725 2125-08-26 +NULL 7937120928560087303 2083-03-14 +NULL 8755921538765428593 1827-05-01 +NULL NULL 2024-01-23 +NULL NULL 2098-02-10 +NULL NULL 2242-02-08 +NULL NULL NULL +PREHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a_nonull +PREHOOK: Input: default@fullouter_long_small_1a +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a_nonull +POSTHOOK: Input: default@fullouter_long_small_1a +#### A masked pattern was here #### +-5206670856103795573 NULL NULL +-5310365297525168078 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-8460550397108077433 NULL NULL +1569543799237464101 NULL NULL +3313583664488247651 NULL NULL +968819023021777205 NULL NULL +NULL -1339636982994067311 2000-06-20 +NULL -1339636982994067311 2008-12-03 +NULL -2098090254092150988 1817-03-12 +NULL -2098090254092150988 2163-05-26 +NULL -2098090254092150988 2219-12-23 +NULL -2184423060953067642 1853-07-06 +NULL -2184423060953067642 1880-10-06 +NULL -2575185053386712613 1809-07-12 +NULL -2575185053386712613 2105-01-21 +NULL -2688622006344936758 1948-10-15 +NULL -2688622006344936758 2129-01-11 +NULL -327698348664467755 2222-10-15 +NULL -3655445881497026796 2108-08-16 +NULL -4224290881682877258 1813-05-17 +NULL -4224290881682877258 2120-01-16 +NULL -4224290881682877258 2185-07-08 +NULL -4961171400048338491 2196-08-10 +NULL -5706981533666803767 1800-09-20 +NULL -5706981533666803767 2151-06-09 +NULL -5754527700632192146 1958-07-15 +NULL -614848861623872247 2101-05-25 +NULL -614848861623872247 2112-11-09 +NULL -6784441713807772877 1845-02-16 +NULL -6784441713807772877 2054-06-17 +NULL -7707546703881534780 2134-08-20 +NULL 214451696109242839 1855-05-12 +NULL 214451696109242839 1977-01-04 +NULL 214451696109242839 2179-04-18 +NULL 2438535236662373438 1881-09-16 +NULL 2438535236662373438 1916-01-10 +NULL 2438535236662373438 2026-06-23 +NULL 3845554233155411208 1805-11-10 +NULL 3845554233155411208 2264-04-05 +NULL 3873405809071478736 1918-11-20 +NULL 3873405809071478736 2034-06-09 +NULL 3873405809071478736 2164-04-23 +NULL 3905351789241845882 1866-07-28 +NULL 3905351789241845882 2045-12-05 +NULL 434940853096155515 2275-02-08 +NULL 4436884039838843341 2031-05-23 +NULL 5246983111579595707 1817-07-01 +NULL 5246983111579595707 2260-05-11 +NULL 5252407779338300447 2039-03-10 +NULL 5252407779338300447 2042-04-26 +NULL 6049335087268933751 2086-12-17 +NULL 6049335087268933751 2282-06-09 +NULL 7297177530102477725 1921-05-11 +NULL 
7297177530102477725 1926-04-12 +NULL 7297177530102477725 2125-08-26 +NULL 7937120928560087303 2083-03-14 +NULL 8755921538765428593 1827-05-01 +NULL NULL 2024-01-23 +NULL NULL 2098-02-10 +NULL NULL 2242-02-08 +PREHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a +PREHOOK: Input: default@fullouter_long_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a +POSTHOOK: Input: default@fullouter_long_small_1a_nonull +#### A masked pattern was here #### +-5206670856103795573 NULL NULL +-5310365297525168078 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-8460550397108077433 NULL NULL +1569543799237464101 NULL NULL +3313583664488247651 NULL NULL +968819023021777205 NULL NULL +NULL -1339636982994067311 2000-06-20 +NULL -1339636982994067311 2008-12-03 +NULL -2098090254092150988 1817-03-12 +NULL -2098090254092150988 2163-05-26 +NULL -2098090254092150988 2219-12-23 +NULL -2184423060953067642 1853-07-06 +NULL -2184423060953067642 1880-10-06 +NULL -2575185053386712613 1809-07-12 +NULL -2575185053386712613 2105-01-21 +NULL -2688622006344936758 1948-10-15 +NULL -2688622006344936758 2129-01-11 +NULL -327698348664467755 2222-10-15 +NULL -3655445881497026796 2108-08-16 +NULL -4224290881682877258 1813-05-17 +NULL -4224290881682877258 2120-01-16 +NULL -4224290881682877258 2185-07-08 +NULL -4961171400048338491 2196-08-10 +NULL -5706981533666803767 1800-09-20 +NULL -5706981533666803767 2151-06-09 +NULL -5754527700632192146 1958-07-15 +NULL -614848861623872247 2101-05-25 +NULL -614848861623872247 2112-11-09 +NULL -6784441713807772877 1845-02-16 +NULL -6784441713807772877 2054-06-17 +NULL -7707546703881534780 2134-08-20 +NULL 214451696109242839 1855-05-12 +NULL 214451696109242839 1977-01-04 +NULL 214451696109242839 2179-04-18 +NULL 2438535236662373438 1881-09-16 +NULL 2438535236662373438 1916-01-10 +NULL 2438535236662373438 2026-06-23 +NULL 3845554233155411208 1805-11-10 +NULL 3845554233155411208 2264-04-05 +NULL 3873405809071478736 1918-11-20 +NULL 3873405809071478736 2034-06-09 +NULL 3873405809071478736 2164-04-23 +NULL 3905351789241845882 1866-07-28 +NULL 3905351789241845882 2045-12-05 +NULL 434940853096155515 2275-02-08 +NULL 4436884039838843341 2031-05-23 +NULL 5246983111579595707 1817-07-01 +NULL 5246983111579595707 2260-05-11 +NULL 5252407779338300447 2039-03-10 +NULL 5252407779338300447 2042-04-26 +NULL 6049335087268933751 2086-12-17 +NULL 6049335087268933751 2282-06-09 +NULL 7297177530102477725 1921-05-11 +NULL 7297177530102477725 1926-04-12 +NULL 7297177530102477725 2125-08-26 +NULL 7937120928560087303 2083-03-14 +NULL 8755921538765428593 1827-05-01 +NULL NULL NULL +PREHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a_nonull +PREHOOK: Input: default@fullouter_long_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a_nonull 
s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a_nonull +POSTHOOK: Input: default@fullouter_long_small_1a_nonull +#### A masked pattern was here #### +-5206670856103795573 NULL NULL +-5310365297525168078 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-8460550397108077433 NULL NULL +1569543799237464101 NULL NULL +3313583664488247651 NULL NULL +968819023021777205 NULL NULL +NULL -1339636982994067311 2000-06-20 +NULL -1339636982994067311 2008-12-03 +NULL -2098090254092150988 1817-03-12 +NULL -2098090254092150988 2163-05-26 +NULL -2098090254092150988 2219-12-23 +NULL -2184423060953067642 1853-07-06 +NULL -2184423060953067642 1880-10-06 +NULL -2575185053386712613 1809-07-12 +NULL -2575185053386712613 2105-01-21 +NULL -2688622006344936758 1948-10-15 +NULL -2688622006344936758 2129-01-11 +NULL -327698348664467755 2222-10-15 +NULL -3655445881497026796 2108-08-16 +NULL -4224290881682877258 1813-05-17 +NULL -4224290881682877258 2120-01-16 +NULL -4224290881682877258 2185-07-08 +NULL -4961171400048338491 2196-08-10 +NULL -5706981533666803767 1800-09-20 +NULL -5706981533666803767 2151-06-09 +NULL -5754527700632192146 1958-07-15 +NULL -614848861623872247 2101-05-25 +NULL -614848861623872247 2112-11-09 +NULL -6784441713807772877 1845-02-16 +NULL -6784441713807772877 2054-06-17 +NULL -7707546703881534780 2134-08-20 +NULL 214451696109242839 1855-05-12 +NULL 214451696109242839 1977-01-04 +NULL 214451696109242839 2179-04-18 +NULL 2438535236662373438 1881-09-16 +NULL 2438535236662373438 1916-01-10 +NULL 2438535236662373438 2026-06-23 +NULL 3845554233155411208 1805-11-10 +NULL 3845554233155411208 2264-04-05 +NULL 3873405809071478736 1918-11-20 +NULL 3873405809071478736 2034-06-09 +NULL 3873405809071478736 2164-04-23 +NULL 3905351789241845882 1866-07-28 +NULL 3905351789241845882 2045-12-05 +NULL 434940853096155515 2275-02-08 +NULL 4436884039838843341 2031-05-23 +NULL 5246983111579595707 1817-07-01 +NULL 5246983111579595707 2260-05-11 +NULL 5252407779338300447 2039-03-10 +NULL 5252407779338300447 2042-04-26 +NULL 6049335087268933751 2086-12-17 +NULL 6049335087268933751 2282-06-09 +NULL 7297177530102477725 1921-05-11 +NULL 7297177530102477725 1926-04-12 +NULL 7297177530102477725 2125-08-26 +NULL 7937120928560087303 2083-03-14 +NULL 8755921538765428593 1827-05-01 +PREHOOK: query: CREATE TABLE fullouter_long_big_1b(key smallint) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1b +POSTHOOK: query: CREATE TABLE fullouter_long_big_1b(key smallint) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1b +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1b.txt' OVERWRITE INTO TABLE fullouter_long_big_1b +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_big_1b +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1b.txt' OVERWRITE INTO TABLE fullouter_long_big_1b +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_big_1b +PREHOOK: query: CREATE TABLE fullouter_long_small_1b(key smallint, s_timestamp timestamp) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default 
+PREHOOK: Output: default@fullouter_long_small_1b
+POSTHOOK: query: CREATE TABLE fullouter_long_small_1b(key smallint, s_timestamp timestamp)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_long_small_1b
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1b.txt' OVERWRITE INTO TABLE fullouter_long_small_1b
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_long_small_1b
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1b.txt' OVERWRITE INTO TABLE fullouter_long_small_1b
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_long_small_1b
+PREHOOK: query: analyze table fullouter_long_big_1b compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_long_big_1b
+PREHOOK: Output: default@fullouter_long_big_1b
+POSTHOOK: query: analyze table fullouter_long_big_1b compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_long_big_1b
+POSTHOOK: Output: default@fullouter_long_big_1b
+PREHOOK: query: analyze table fullouter_long_big_1b compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_long_big_1b
+PREHOOK: Output: default@fullouter_long_big_1b
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_long_big_1b compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_long_big_1b
+POSTHOOK: Output: default@fullouter_long_big_1b
+#### A masked pattern was here ####
+PREHOOK: query: analyze table fullouter_long_small_1b compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_long_small_1b
+PREHOOK: Output: default@fullouter_long_small_1b
+POSTHOOK: query: analyze table fullouter_long_small_1b compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_long_small_1b
+POSTHOOK: Output: default@fullouter_long_small_1b
+PREHOOK: query: analyze table fullouter_long_small_1b compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_long_small_1b
+PREHOOK: Output: default@fullouter_long_small_1b
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_long_small_1b compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_long_small_1b
+POSTHOOK: Output: default@fullouter_long_small_1b
+#### A masked pattern was here ####
+PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
+SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key
+order by b.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL
+SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key
+order by b.key
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+ enabled: true
+ enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE)
+ Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: b
+ Statistics: Num rows: 13 Data size: 63 Basic stats: COMPLETE Column stats: NONE
+ TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:smallint, 1:ROW__ID:struct] + Select Operator + expressions: key (type: smallint) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 13 Data size: 63 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: smallint) + sort order: + + Map-reduce partition columns: _col0 (type: smallint) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:smallint + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 13 Data size: 63 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + includeColumns: [0] + dataColumns: key:smallint + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 2 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 72 Data size: 2208 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:smallint, 1:s_timestamp:timestamp, 2:ROW__ID:struct] + Select Operator + expressions: key (type: smallint), s_timestamp (type: timestamp) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 72 Data size: 2208 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: smallint) + sort order: + + Map-reduce partition columns: _col0 (type: smallint) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:smallint + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:timestamp + Statistics: Num rows: 72 Data size: 2208 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: timestamp) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:smallint, s_timestamp:timestamp + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: a + reduceColumnSortOrder: + + allNative: true + 
usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + dataColumns: KEY.reducesinkkey0:smallint, VALUE._col0:timestamp + partitionColumnCount: 0 + scratchColumnTypeNames: [bigint] + Reduce Operator Tree: + Map Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: smallint) + 1 KEY.reducesinkkey0 (type: smallint) + Map Join Vectorization: + bigTableKeyColumns: 0:smallint + bigTableRetainColumnNums: [0, 1] + bigTableValueColumns: 0:smallint, 1:timestamp + className: VectorMapJoinFullOuterLongOperator + fullOuterSmallTableKeyMapping: 0 -> 2 + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Fast Hash Table and No Hybrid Hash Join IS true + projectedOutput: 2:smallint, 0:smallint, 1:timestamp + hashTableImplementationType: FAST + outputColumnNames: _col0, _col1, _col2 + input vertices: + 0 Map 1 + Statistics: Num rows: 79 Data size: 2428 Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: smallint) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + keyColumns: 2:smallint + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 0:smallint, 1:timestamp + Statistics: Num rows: 79 Data size: 2428 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: smallint), _col2 (type: timestamp) + Reducer 4 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: z + reduceColumnSortOrder: + + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + dataColumns: KEY.reducesinkkey0:smallint, VALUE._col0:smallint, VALUE._col1:timestamp + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: smallint), VALUE._col0 (type: smallint), VALUE._col1 (type: timestamp) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2] + Statistics: Num rows: 79 Data size: 2428 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 79 Data size: 2428 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1b +PREHOOK: Input: 
default@fullouter_long_small_1b +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1b +POSTHOOK: Input: default@fullouter_long_small_1b +#### A masked pattern was here #### +-25394 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +32030 32030 2101-09-09 07:35:05.145 +NULL -14172 1918-09-13 11:44:24.496926711 +NULL -14172 2355-01-14 23:23:34 +NULL -14172 2809-06-07 02:10:58 +NULL -15361 2219-09-15 20:15:03.000169887 +NULL -15361 2434-08-13 20:37:07.000172979 +NULL -15427 2023-11-09 19:31:21 +NULL -15427 2046-06-07 22:58:40.728 +NULL -15427 2355-01-08 12:34:11.617 +NULL -19167 2230-12-22 20:25:39.000242111 +NULL -19167 2319-08-26 11:07:11.268 +NULL -20517 2233-12-20 04:06:56.666522799 +NULL -20517 2774-06-23 12:04:06.5 +NULL -20824 2478-11-05 00:28:05 +NULL -22422 1949-03-13 00:07:53.075 +NULL -22422 2337-07-19 06:33:02.000353352 +NULL -22422 2982-12-28 06:30:26.000883228 +NULL -23117 2037-01-05 21:52:30.685952759 +NULL -24775 2035-03-26 08:11:23.375224153 +NULL -24775 2920-08-06 15:58:28.261059449 +NULL -26998 2268-08-04 12:48:11.848006292 +NULL -26998 2428-12-26 07:53:45.96925825 +NULL -26998 2926-07-18 09:02:46.077 +NULL -29600 2333-11-02 15:06:30 +NULL -30059 2269-05-04 21:23:44.000339209 +NULL -30059 2420-12-10 22:12:30 +NULL -30059 2713-10-13 09:28:49 +NULL -30306 2619-05-24 10:35:58.000774018 +NULL -4279 2214-09-10 03:53:06 +NULL -4279 2470-08-12 11:21:14.000955747 +NULL -7373 2662-10-28 12:07:02.000526564 +NULL -7624 2219-12-03 17:07:19 +NULL -7624 2289-08-28 00:14:34 +NULL -7624 2623-03-20 03:18:45.00006465 +NULL -8087 2550-06-26 23:57:42.588007617 +NULL -8087 2923-07-02 11:40:26.115 +NULL -8435 2642-02-07 11:45:04.353231638 +NULL -8435 2834-12-06 16:38:18.901 +NULL -8624 2120-02-15 15:36:40.000758423 +NULL -8624 2282-03-28 07:58:16 +NULL -8624 2644-05-04 04:45:07.839 +NULL 10553 2168-05-05 21:10:59.000152113 +NULL 11232 2038-04-06 14:53:59 +NULL 11232 2507-01-27 22:04:22.49661421 +NULL 11232 2533-11-26 12:22:18 +NULL 13598 2421-05-20 14:18:31.000264698 +NULL 13598 2909-06-25 23:22:50 +NULL 14865 2079-10-06 16:54:35.117 +NULL 14865 2220-02-28 03:41:36 +NULL 14865 2943-03-21 00:42:10.505 +NULL 17125 2236-07-14 01:54:40.927230276 +NULL 17125 2629-11-15 15:34:52 +NULL 21181 2253-03-12 11:55:48.332 +NULL 21181 2434-02-20 00:46:29.633 +NULL 21436 2526-09-22 23:44:55 +NULL 21436 2696-05-08 05:19:24.112 +NULL 24870 2752-12-26 12:32:23.03685163 +NULL 2632 2561-12-15 15:42:27 +NULL 26484 1919-03-04 07:32:37.519 +NULL 26484 2953-03-10 02:05:26.508953676 +NULL 2748 2298-06-20 21:01:24 +NULL 2748 2759-02-13 18:04:36.000307355 +NULL 2748 2862-04-20 13:12:39.482805897 +NULL 29407 2385-12-14 06:03:39.597 +NULL 3198 2223-04-14 13:20:49 +NULL 3198 2428-06-13 16:21:33.955 +NULL 3198 2736-12-20 03:59:50.343550301 +NULL 4510 2293-01-17 13:47:41.00001006 +NULL 4510 2777-03-24 03:44:28.000169723 +NULL NULL 2124-05-07 15:01:19.021 +NULL NULL 2933-06-20 11:48:09.000839488 +NULL NULL 2971-08-07 12:02:11.000948152 +NULL NULL NULL +PREHOOK: query: CREATE TABLE fullouter_long_big_1c(key int, b_string string) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1c +POSTHOOK: 
query: CREATE TABLE fullouter_long_big_1c(key int, b_string string)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_long_big_1c
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1c.txt' OVERWRITE INTO TABLE fullouter_long_big_1c
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_long_big_1c
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1c.txt' OVERWRITE INTO TABLE fullouter_long_big_1c
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_long_big_1c
+PREHOOK: query: CREATE TABLE fullouter_long_small_1c(key int, s_decimal decimal(38, 18))
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_long_small_1c
+POSTHOOK: query: CREATE TABLE fullouter_long_small_1c(key int, s_decimal decimal(38, 18))
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_long_small_1c
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1c.txt' OVERWRITE INTO TABLE fullouter_long_small_1c
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_long_small_1c
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1c.txt' OVERWRITE INTO TABLE fullouter_long_small_1c
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_long_small_1c
+PREHOOK: query: analyze table fullouter_long_big_1c compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_long_big_1c
+PREHOOK: Output: default@fullouter_long_big_1c
+POSTHOOK: query: analyze table fullouter_long_big_1c compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_long_big_1c
+POSTHOOK: Output: default@fullouter_long_big_1c
+PREHOOK: query: analyze table fullouter_long_big_1c compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_long_big_1c
+PREHOOK: Output: default@fullouter_long_big_1c
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_long_big_1c compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_long_big_1c
+POSTHOOK: Output: default@fullouter_long_big_1c
+#### A masked pattern was here ####
+PREHOOK: query: analyze table fullouter_long_small_1c compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_long_small_1c
+PREHOOK: Output: default@fullouter_long_small_1c
+POSTHOOK: query: analyze table fullouter_long_small_1c compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_long_small_1c
+POSTHOOK: Output: default@fullouter_long_small_1c
+PREHOOK: query: analyze table fullouter_long_small_1c compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_long_small_1c
+PREHOOK: Output: default@fullouter_long_small_1c
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_long_small_1c compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_long_small_1c
+POSTHOOK: Output: default@fullouter_long_small_1c
+#### A masked pattern was here ####
+PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
+SELECT b.key, b.b_string, s.key,
s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 11 Data size: 173 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:int, 1:b_string:string, 2:ROW__ID:struct] + Select Operator + expressions: key (type: int), b_string (type: string) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 11 Data size: 173 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:string + Statistics: Num rows: 11 Data size: 173 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:int, b_string:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 2 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 81 Data size: 1703 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:int, 1:s_decimal:decimal(38,18), 2:ROW__ID:struct] + Select Operator + expressions: key (type: int), s_decimal (type: decimal(38,18)) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 81 Data size: 1703 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, 
LazyBinarySerDe for values IS true + valueColumns: 1:decimal(38,18) + Statistics: Num rows: 81 Data size: 1703 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: decimal(38,18)) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:int, s_decimal:decimal(38,18) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: a + reduceColumnSortOrder: + + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + dataColumns: KEY.reducesinkkey0:int, VALUE._col0:decimal(38,18) + partitionColumnCount: 0 + scratchColumnTypeNames: [bigint, string] + Reduce Operator Tree: + Map Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: int) + 1 KEY.reducesinkkey0 (type: int) + Map Join Vectorization: + bigTableKeyColumns: 0:int + bigTableRetainColumnNums: [0, 1] + bigTableValueColumns: 0:int, 1:decimal(38,18) + className: VectorMapJoinFullOuterLongOperator + fullOuterSmallTableKeyMapping: 0 -> 2 + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Fast Hash Table and No Hybrid Hash Join IS true + projectedOutput: 2:int, 3:string, 0:int, 1:decimal(38,18) + smallTableValueMapping: 3:string + hashTableImplementationType: FAST + outputColumnNames: _col0, _col1, _col2, _col3 + input vertices: + 0 Map 1 + Statistics: Num rows: 89 Data size: 1873 Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + keyColumns: 2:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 3:string, 0:int, 1:decimal(38,18) + Statistics: Num rows: 89 Data size: 1873 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string), _col2 (type: int), _col3 (type: decimal(38,18)) + Reducer 4 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: z + reduceColumnSortOrder: + + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 4 + dataColumns: KEY.reducesinkkey0:int, VALUE._col0:string, VALUE._col1:int, VALUE._col2:decimal(38,18) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Select 
Operator + expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: int), VALUE._col2 (type: decimal(38,18)) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3] + Statistics: Num rows: 89 Data size: 1873 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 89 Data size: 1873 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1c +PREHOOK: Input: default@fullouter_long_small_1c +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1c +POSTHOOK: Input: default@fullouter_long_small_1c +#### A masked pattern was here #### +-1437463633 JU NULL NULL +-1437463633 NULL NULL NULL +-1437463633 SOWDWMS NULL NULL +-1437463633 TKTKGVGFW NULL NULL +-1437463633 YYXPPCH NULL NULL +1725068083 MKSCCE NULL NULL +1928928239 AMKTIWQ NULL NULL +1928928239 NULL NULL NULL +1928928239 NULL NULL NULL +1928928239 VAQHVRI NULL NULL +NULL ABBZ NULL NULL +NULL NULL -1093006502 -69.556658280000000000 +NULL NULL -1197550983 -0.558879692200000000 +NULL NULL -1197550983 0.100000000000000000 +NULL NULL -1197550983 71852.833867441261300000 +NULL NULL -1250662632 -544.554649000000000000 +NULL NULL -1250662632 5454127198.951479000000000000 +NULL NULL -1250662632 93104.000000000000000000 +NULL NULL -1264372462 -6993985240226.000000000000000000 +NULL NULL -1264372462 -899.000000000000000000 +NULL NULL -1264372462 0.883000000000000000 +NULL NULL -1490239076 92253.232096000000000000 +NULL NULL -1681455031 -11105.372477000000000000 +NULL NULL -1681455031 -6.454300000000000000 +NULL NULL -1740848088 -9.157000000000000000 +NULL NULL -1740848088 0.506394259000000000 +NULL NULL -1740848088 901.441000000000000000 +NULL NULL -2048404259 -0.322296044625100000 +NULL NULL -2048404259 3939387044.100000000000000000 +NULL NULL -2123273881 -55.891980000000000000 +NULL NULL -2123273881 3.959000000000000000 +NULL NULL -243940373 -583.258000000000000000 +NULL NULL -243940373 -97176129669.654953000000000000 +NULL NULL -369457052 560.119078830904550000 +NULL NULL -369457052 7.700000000000000000 +NULL NULL -424713789 0.480000000000000000 +NULL NULL -466171792 0.000000000000000000 +NULL NULL -466171792 4227.534400000000000000 +NULL NULL -466171792 69.900000000000000000 +NULL NULL -477147437 6.000000000000000000 +NULL NULL -793950320 -0.100000000000000000 +NULL NULL -793950320 -16.000000000000000000 +NULL NULL -934092157 -7843850349.571300380000000000 +NULL NULL -99948814 -38076694.398100000000000000 +NULL NULL -99948814 -96386.438000000000000000 +NULL NULL 1039864870 0.700000000000000000 +NULL NULL 1039864870 94.040000000000000000 +NULL NULL 1039864870 
987601.570000000000000000 +NULL NULL 1091836730 -5017.140000000000000000 +NULL NULL 1091836730 0.020000000000000000 +NULL NULL 1242586043 -4.000000000000000000 +NULL NULL 1242586043 -749975924224.630000000000000000 +NULL NULL 1242586043 71.148500000000000000 +NULL NULL 1479580778 92077343080.700000000000000000 +NULL NULL 150678276 -8278.000000000000000000 +NULL NULL 150678276 15989394.843600000000000000 +NULL NULL 1519948464 152.000000000000000000 +NULL NULL 1561921421 -5.405000000000000000 +NULL NULL 1561921421 53050.550000000000000000 +NULL NULL 1585021913 -5762331.066971120000000000 +NULL NULL 1585021913 607.227470000000000000 +NULL NULL 1585021913 745222.668089540000000000 +NULL NULL 1719049112 -7888197.000000000000000000 +NULL NULL 1738753776 -99817635066320.241600000000000000 +NULL NULL 1738753776 1525.280459649262000000 +NULL NULL 1755897735 -39.965207000000000000 +NULL NULL 1785750809 47443.115000000000000000 +NULL NULL 1801735854 -1760956929364.267000000000000000 +NULL NULL 1801735854 -438541294.700000000000000000 +NULL NULL 1816559437 -1035.700900000000000000 +NULL NULL 1909136587 -8610.078036935181000000 +NULL NULL 1909136587 181.076815359440000000 +NULL NULL 193709887 -0.566300000000000000 +NULL NULL 193709887 -19889.830000000000000000 +NULL NULL 193709887 0.800000000000000000 +NULL NULL 284554389 5.727146000000000000 +NULL NULL 294598722 -3542.600000000000000000 +NULL NULL 294598722 -9377326244.444000000000000000 +NULL NULL 448130683 -4302.485366846491000000 +NULL NULL 452719211 3020.293893074463600000 +NULL NULL 452719211 83003.437220000000000000 +NULL NULL 466567142 -58810.605860000000000000 +NULL NULL 466567142 -9763217822.129028000000000000 +NULL NULL 466567142 196.578529539858400000 +NULL NULL 560745412 678.250000000000000000 +NULL NULL 698032489 -330457.429262583900000000 +NULL NULL 891262439 -0.040000000000000000 +NULL NULL 90660785 -4564.517185000000000000 +NULL NULL 90660785 12590.288613000000000000 +NULL NULL NULL 1.089120893565337000 +NULL NULL NULL 4.261652270000000000 +NULL NULL NULL 682070836.264960300000000000 +PREHOOK: query: CREATE TABLE fullouter_long_big_1d(key int) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1d +POSTHOOK: query: CREATE TABLE fullouter_long_big_1d(key int) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1d +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1d.txt' OVERWRITE INTO TABLE fullouter_long_big_1d +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_big_1d +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1d.txt' OVERWRITE INTO TABLE fullouter_long_big_1d +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_big_1d +PREHOOK: query: CREATE TABLE fullouter_long_small_1d(key int) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1d +POSTHOOK: query: CREATE TABLE fullouter_long_small_1d(key int) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1d +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1d.txt' OVERWRITE INTO TABLE 
fullouter_long_small_1d +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_small_1d +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1d.txt' OVERWRITE INTO TABLE fullouter_long_small_1d +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_small_1d +PREHOOK: query: analyze table fullouter_long_big_1d compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1d +PREHOOK: Output: default@fullouter_long_big_1d +POSTHOOK: query: analyze table fullouter_long_big_1d compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1d +POSTHOOK: Output: default@fullouter_long_big_1d +PREHOOK: query: analyze table fullouter_long_big_1d compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_big_1d +PREHOOK: Output: default@fullouter_long_big_1d +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_big_1d compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_big_1d +POSTHOOK: Output: default@fullouter_long_big_1d +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_long_small_1d compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_small_1d +PREHOOK: Output: default@fullouter_long_small_1d +POSTHOOK: query: analyze table fullouter_long_small_1d compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_small_1d +POSTHOOK: Output: default@fullouter_long_small_1d +PREHOOK: query: analyze table fullouter_long_small_1d compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_small_1d +PREHOOK: Output: default@fullouter_long_small_1d +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_small_1d compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_small_1d +POSTHOOK: Output: default@fullouter_long_small_1d +#### A masked pattern was here #### +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 12 Data size: 106 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:int, 1:ROW__ID:struct] + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 12 Data size: 106 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: 
+ + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 12 Data size: 106 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + includeColumns: [0] + dataColumns: key:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 2 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 39 Data size: 381 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:int, 1:ROW__ID:struct] + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 39 Data size: 381 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 39 Data size: 381 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + includeColumns: [0] + dataColumns: key:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: a + reduceColumnSortOrder: + + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + dataColumns: KEY.reducesinkkey0:int + partitionColumnCount: 0 + scratchColumnTypeNames: [bigint] + Reduce Operator Tree: + Map Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: int) + 1 KEY.reducesinkkey0 (type: int) + Map Join Vectorization: + bigTableKeyColumns: 0:int + bigTableRetainColumnNums: [0] + bigTableValueColumns: 0:int + className: VectorMapJoinFullOuterLongOperator + fullOuterSmallTableKeyMapping: 0 -> 1 + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, 
hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Fast Hash Table and No Hybrid Hash Join IS true + projectedOutput: 1:int, 0:int + hashTableImplementationType: FAST + outputColumnNames: _col0, _col1 + input vertices: + 0 Map 1 + Statistics: Num rows: 42 Data size: 419 Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + keyColumns: 1:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 0:int + Statistics: Num rows: 42 Data size: 419 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: int) + Reducer 4 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: z + reduceColumnSortOrder: + + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + dataColumns: KEY.reducesinkkey0:int, VALUE._col0:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 42 Data size: 419 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 42 Data size: 419 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1d +PREHOOK: Input: default@fullouter_long_small_1d +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1d +POSTHOOK: Input: default@fullouter_long_small_1d +#### A masked pattern was here #### +-1780951928 NULL +-2038654700 -2038654700 +-670834064 NULL +-702028721 NULL +-702028721 NULL +-702028721 NULL +-814597051 NULL +-814597051 NULL +-814597051 NULL +-814597051 NULL +NULL -1003639073 +NULL -1014271154 +NULL -1036083124 +NULL -1210744742 +NULL -1323620496 +NULL -1379355738 +NULL -1712018127 +NULL -1792852276 +NULL -1912571616 +NULL -497171161 +NULL -683339273 +NULL -707688773 +NULL -747044796 +NULL -894799664 +NULL -932176731 +NULL 103640700 +NULL 1164387380 +NULL 1372592319 +NULL 1431997749 +NULL 1614287784 +NULL 
162858059
+NULL 1635405412
+NULL 1685473722
+NULL 1780951928
+NULL 1825107160
+NULL 1831520491
+NULL 1840266070
+NULL 1997943409
+NULL 2119085509
+NULL 246169862
+NULL 260588085
+NULL 41376947
+NULL 436878811
+NULL 533298451
+NULL 670834064
+NULL 699007128
+NULL 699863556
+NULL NULL
+NULL NULL
+NULL NULL
+PREHOOK: query: CREATE TABLE fullouter_multikey_big_1a_txt(key0 smallint, key1 int)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_multikey_big_1a_txt
+POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1a_txt(key0 smallint, key1 int)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_multikey_big_1a_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_multikey_big_1a_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_multikey_big_1a_txt
+PREHOOK: query: CREATE TABLE fullouter_multikey_big_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@fullouter_multikey_big_1a_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_multikey_big_1a
+POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@fullouter_multikey_big_1a_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_multikey_big_1a
+POSTHOOK: Lineage: fullouter_multikey_big_1a.key0 SIMPLE [(fullouter_multikey_big_1a_txt)fullouter_multikey_big_1a_txt.FieldSchema(name:key0, type:smallint, comment:null), ]
+POSTHOOK: Lineage: fullouter_multikey_big_1a.key1 SIMPLE [(fullouter_multikey_big_1a_txt)fullouter_multikey_big_1a_txt.FieldSchema(name:key1, type:int, comment:null), ]
+PREHOOK: query: CREATE TABLE fullouter_multikey_big_1a_nonull_txt(key0 smallint, key1 int)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_multikey_big_1a_nonull_txt
+POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1a_nonull_txt(key0 smallint, key1 int)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_multikey_big_1a_nonull_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_nonull_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_multikey_big_1a_nonull_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_nonull_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_multikey_big_1a_nonull_txt
+PREHOOK: query: CREATE TABLE fullouter_multikey_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_nonull_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@fullouter_multikey_big_1a_nonull_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_multikey_big_1a_nonull
+POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_nonull_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@fullouter_multikey_big_1a_nonull_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_multikey_big_1a_nonull
+POSTHOOK: Lineage: fullouter_multikey_big_1a_nonull.key0 SIMPLE [(fullouter_multikey_big_1a_nonull_txt)fullouter_multikey_big_1a_nonull_txt.FieldSchema(name:key0, type:smallint, comment:null), ]
+POSTHOOK: Lineage: fullouter_multikey_big_1a_nonull.key1 SIMPLE [(fullouter_multikey_big_1a_nonull_txt)fullouter_multikey_big_1a_nonull_txt.FieldSchema(name:key1, type:int, comment:null), ]
+PREHOOK: query: CREATE TABLE fullouter_multikey_small_1a_txt(key0 smallint, key1 int)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_multikey_small_1a_txt
+POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1a_txt(key0 smallint, key1 int)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_multikey_small_1a_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_multikey_small_1a_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_multikey_small_1a_txt
+PREHOOK: query: CREATE TABLE fullouter_multikey_small_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@fullouter_multikey_small_1a_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_multikey_small_1a
+POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@fullouter_multikey_small_1a_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_multikey_small_1a
+POSTHOOK: Lineage: fullouter_multikey_small_1a.key0 SIMPLE [(fullouter_multikey_small_1a_txt)fullouter_multikey_small_1a_txt.FieldSchema(name:key0, type:smallint, comment:null), ]
+POSTHOOK: Lineage: fullouter_multikey_small_1a.key1 SIMPLE [(fullouter_multikey_small_1a_txt)fullouter_multikey_small_1a_txt.FieldSchema(name:key1, type:int, comment:null), ]
+PREHOOK: query: CREATE TABLE fullouter_multikey_small_1a_nonull_txt(key0 smallint, key1 int)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_multikey_small_1a_nonull_txt
+POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1a_nonull_txt(key0 smallint, key1 int)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_multikey_small_1a_nonull_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_nonull_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_multikey_small_1a_nonull_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_nonull_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_multikey_small_1a_nonull_txt
+PREHOOK: query: CREATE TABLE fullouter_multikey_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_nonull_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@fullouter_multikey_small_1a_nonull_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_multikey_small_1a_nonull
+POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_nonull_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@fullouter_multikey_small_1a_nonull_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_multikey_small_1a_nonull
+POSTHOOK: Lineage: fullouter_multikey_small_1a_nonull.key0 SIMPLE [(fullouter_multikey_small_1a_nonull_txt)fullouter_multikey_small_1a_nonull_txt.FieldSchema(name:key0, type:smallint, comment:null), ]
+POSTHOOK: Lineage: fullouter_multikey_small_1a_nonull.key1 SIMPLE [(fullouter_multikey_small_1a_nonull_txt)fullouter_multikey_small_1a_nonull_txt.FieldSchema(name:key1, type:int, comment:null), ]
+PREHOOK: query: analyze table fullouter_multikey_big_1a compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_multikey_big_1a
+PREHOOK: Output: default@fullouter_multikey_big_1a
+POSTHOOK: query: analyze table fullouter_multikey_big_1a compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_multikey_big_1a
+POSTHOOK: Output: default@fullouter_multikey_big_1a
+PREHOOK: query: analyze table fullouter_multikey_big_1a compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_multikey_big_1a
+PREHOOK: Output: default@fullouter_multikey_big_1a
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_multikey_big_1a compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_multikey_big_1a
+POSTHOOK: Output: default@fullouter_multikey_big_1a
+#### A masked pattern was here ####
+PREHOOK: query: analyze table fullouter_multikey_big_1a_nonull compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_multikey_big_1a_nonull
+PREHOOK: Output: default@fullouter_multikey_big_1a_nonull
+POSTHOOK: query: analyze table fullouter_multikey_big_1a_nonull compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_multikey_big_1a_nonull
+POSTHOOK: Output: default@fullouter_multikey_big_1a_nonull
+PREHOOK: query: analyze table fullouter_multikey_big_1a_nonull compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_multikey_big_1a_nonull
+PREHOOK: Output: default@fullouter_multikey_big_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_multikey_big_1a_nonull compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_multikey_big_1a_nonull
+POSTHOOK: Output: default@fullouter_multikey_big_1a_nonull
+#### A masked pattern was here ####
+PREHOOK: query: analyze table fullouter_multikey_small_1a compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_multikey_small_1a
+PREHOOK: Output: default@fullouter_multikey_small_1a
+POSTHOOK: query: analyze table fullouter_multikey_small_1a compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_multikey_small_1a
+POSTHOOK: Output: default@fullouter_multikey_small_1a
+PREHOOK: query: analyze table fullouter_multikey_small_1a compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_multikey_small_1a
+PREHOOK: Output: default@fullouter_multikey_small_1a
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_multikey_small_1a compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_multikey_small_1a
+POSTHOOK: Output: default@fullouter_multikey_small_1a
+#### A masked pattern was here ####
+PREHOOK: query: analyze table fullouter_multikey_small_1a_nonull compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_multikey_small_1a_nonull
+PREHOOK: Output: default@fullouter_multikey_small_1a_nonull
+POSTHOOK: query: analyze table fullouter_multikey_small_1a_nonull compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_multikey_small_1a_nonull
+POSTHOOK: Output: default@fullouter_multikey_small_1a_nonull
+PREHOOK: query: analyze table fullouter_multikey_small_1a_nonull compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_multikey_small_1a_nonull
+PREHOOK: Output: default@fullouter_multikey_small_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_multikey_small_1a_nonull compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_multikey_small_1a_nonull
+POSTHOOK: Output: default@fullouter_multikey_small_1a_nonull
+#### A masked pattern was here ####
+PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
+SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1
+order by b.key0, b.key1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL
+SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1
+order by b.key0, b.key1
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: b
+                  Statistics: Num rows: 13 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                      vectorizationSchemaColumns: [0:key0:smallint, 1:key1:int, 2:ROW__ID:struct]
+                  Select Operator
+                    expressions: key0 (type: smallint), key1 (type: int)
+                    outputColumnNames: _col0, _col1
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
+                        projectedOutputColumnNums: [0, 1]
+                    Statistics: Num rows: 13 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: smallint), _col1 (type: int)
+                      sort order: ++
+                      Map-reduce partition columns: _col0 (type: smallint), _col1 (type: int)
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkMultiKeyOperator
+                          keyColumns: 0:smallint, 1:int
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                      Statistics: Num rows: 13 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0, 1]
+                    dataColumns: key0:smallint, key1:int
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+        Map 2
+            Map Operator Tree:
+                TableScan
+                  alias: s
+                  Statistics: Num rows: 92 Data size: 724 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                      vectorizationSchemaColumns: [0:key0:smallint, 1:key1:int, 2:ROW__ID:struct]
+                  Select Operator
+                    expressions: key0 (type: smallint), key1 (type: int)
+                    outputColumnNames: _col0, _col1
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
+                        projectedOutputColumnNums: [0, 1]
+                    Statistics: Num rows: 92 Data size: 724 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: smallint), _col1 (type: int)
+                      sort order: ++
+                      Map-reduce partition columns: _col0 (type: smallint), _col1 (type: int)
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkMultiKeyOperator
+                          keyColumns: 0:smallint, 1:int
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                      Statistics: Num rows: 92 Data size: 724 Basic stats: COMPLETE Column stats: NONE
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0, 1]
+                    dataColumns: key0:smallint, key1:int
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+        Reducer 3
+            Execution mode: vectorized, llap
+            Reduce Vectorization:
+                enabled: true
+                enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: aa
+                reduceColumnSortOrder: ++
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    dataColumns: KEY.reducesinkkey0:smallint, KEY.reducesinkkey1:int
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: [bigint, bigint]
+            Reduce Operator Tree:
+              Map Join Operator
+                condition map:
+                     Full Outer Join 0 to 1
+                keys:
+                  0 KEY.reducesinkkey0 (type: smallint), KEY.reducesinkkey1 (type: int)
+                  1 KEY.reducesinkkey0 (type: smallint), KEY.reducesinkkey1 (type: int)
+                Map Join Vectorization:
+                    bigTableKeyColumns: 0:smallint, 1:int
+                    bigTableRetainColumnNums: [0, 1]
+                    bigTableValueColumns: 0:smallint, 1:int
+                    className: VectorMapJoinFullOuterMultiKeyOperator
+                    fullOuterSmallTableKeyMapping: 0 -> 2, 1 -> 3
+                    native: true
+                    nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Fast Hash Table and No Hybrid Hash Join IS true
+                    projectedOutput: 2:smallint, 3:int, 0:smallint, 1:int
+                    hashTableImplementationType: FAST
+                outputColumnNames: _col0, _col1, _col2, _col3
+                input vertices:
+                  0 Map 1
+                Statistics: Num rows: 101 Data size: 796 Basic stats: COMPLETE Column stats: NONE
+                DynamicPartitionHashJoin: true
+                Reduce Output Operator
+                  key expressions: _col0 (type: smallint), _col1 (type: int)
+                  sort order: ++
+                  Reduce Sink Vectorization:
+                      className: VectorReduceSinkObjectHashOperator
+                      keyColumns: 2:smallint, 3:int
+                      native: true
+                      nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                      valueColumns: 0:smallint, 1:int
+                  Statistics: Num rows: 101 Data size: 796 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col2 (type: smallint), _col3 (type: int)
+        Reducer 4
+            Execution mode: vectorized, llap
+            Reduce Vectorization:
+                enabled: true
+                enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: zz
+                reduceColumnSortOrder: ++
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 4
+                    dataColumns: KEY.reducesinkkey0:smallint, KEY.reducesinkkey1:int, VALUE._col0:smallint, VALUE._col1:int
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: smallint), KEY.reducesinkkey1 (type: int), VALUE._col0 (type: smallint), VALUE._col1 (type: int)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Select Vectorization:
+                    className: VectorSelectOperator
+                    native: true
+                    projectedOutputColumnNums: [0, 1, 2, 3]
+                Statistics: Num rows: 101 Data size: 796 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  File Sink Vectorization:
+                      className: VectorFileSinkOperator
+                      native: false
+                  Statistics: Num rows: 101 Data size: 796 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1
+order by b.key0, b.key1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_multikey_big_1a
+PREHOOK: Input: default@fullouter_multikey_small_1a
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1
+order by b.key0, b.key1
+POSTHOOK: type: QUERY
+POSTHOOK: Input:
default@fullouter_multikey_big_1a +POSTHOOK: Input: default@fullouter_multikey_small_1a +#### A masked pattern was here #### +-17582 -1730236061 NULL NULL +-17582 1082230084 NULL NULL +-17582 267529350 -17582 267529350 +-17582 827141667 NULL NULL +-17582 9637312 NULL NULL +-18222 -1969080993 NULL NULL +-6131 -1969080993 -6131 -1969080993 +1499 371855128 NULL NULL +22767 -1969080993 NULL NULL +3556 -1969080993 NULL NULL +3556 NULL NULL NULL +NULL 1082230084 NULL NULL +NULL NULL -11868 -3536499 +NULL NULL -11868 -915441041 +NULL NULL -11868 1052120431 +NULL NULL -11868 1318114822 +NULL NULL -11868 1456809245 +NULL NULL -11868 1658440922 +NULL NULL -11868 930596435 +NULL NULL -11868 97203778 +NULL NULL -12252 1956403781 +NULL NULL -12252 964377504 +NULL NULL -15212 -2055239583 +NULL NULL -17788 -1361776766 +NULL NULL -17788 -738743861 +NULL NULL -17788 -872691214 +NULL NULL -17788 528419995 +NULL NULL -1787 -63842445 +NULL NULL -20125 -1995259010 +NULL NULL -20900 1078466156 +NULL NULL -22311 -2055239583 +NULL NULL -23457 -63842445 +NULL NULL -2407 1078466156 +NULL NULL -24206 -1456409156 +NULL NULL -24206 641361618 +NULL NULL -26894 -63842445 +NULL NULL -28129 -2055239583 +NULL NULL -28137 -63842445 +NULL NULL -28313 -706104224 +NULL NULL -28313 51228026 +NULL NULL -28313 837320573 +NULL NULL -4117 -1386947816 +NULL NULL -5734 1078466156 +NULL NULL -6061 -586336015 +NULL NULL -7386 -1635102480 +NULL NULL -7386 -2112062470 +NULL NULL -7386 100736776 +NULL NULL -980 -270600267 +NULL NULL -980 -333603940 +NULL NULL -980 -465544127 +NULL NULL -980 -801821285 +NULL NULL -980 1310479628 +NULL NULL -980 2009785365 +NULL NULL -980 356970043 +NULL NULL -980 628784462 +NULL NULL -980 712692345 +NULL NULL 11460 1078466156 +NULL NULL 12089 -63842445 +NULL NULL 13672 -63842445 +NULL NULL 14400 -825652334 +NULL NULL 15061 -63842445 +NULL NULL 15404 1078466156 +NULL NULL 16166 931172175 +NULL NULL 16696 -63842445 +NULL NULL 20156 -1618478138 +NULL NULL 20156 1165375499 +NULL NULL 20156 1855042153 +NULL NULL 20156 963883665 +NULL NULL 20969 -1995259010 +NULL NULL 21186 -586336015 +NULL NULL 22934 -1695419330 +NULL NULL 23015 -1893013623 +NULL NULL 23015 -217613200 +NULL NULL 23015 -252525791 +NULL NULL 23015 -276888585 +NULL NULL 23015 -696928205 +NULL NULL 23015 -893234501 +NULL NULL 23015 258882280 +NULL NULL 23015 564751472 +NULL NULL 26738 -2055239583 +NULL NULL 26944 -1995259010 +NULL NULL 30353 -1007182618 +NULL NULL 30353 -1011627089 +NULL NULL 30353 -1507157031 +NULL NULL 30353 105613996 +NULL NULL 30353 1241923267 +NULL NULL 30353 1364268303 +NULL NULL 30353 2044473567 +NULL NULL 31443 -1968665833 +NULL NULL 3412 -1196037018 +NULL NULL 3412 -1249487623 +NULL NULL 3412 -2081156563 +NULL NULL 3412 -2132472060 +NULL NULL 3412 1253976194 +NULL NULL 3890 1411429004 +NULL NULL 4586 -586336015 +NULL NULL 4779 -1995259010 +NULL NULL 4902 1078466156 +NULL NULL 5957 -1995259010 +NULL NULL 8177 -1995259010 +NULL NULL NULL 1082230084 +NULL NULL NULL NULL +NULL NULL NULL NULL +PREHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_big_1a_nonull +PREHOOK: Input: default@fullouter_multikey_small_1a +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND 
b.key1 = s.key1 +order by b.key0, b.key1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_big_1a_nonull +POSTHOOK: Input: default@fullouter_multikey_small_1a +#### A masked pattern was here #### +-17582 -1730236061 NULL NULL +-17582 1082230084 NULL NULL +-17582 267529350 -17582 267529350 +-17582 827141667 NULL NULL +-17582 9637312 NULL NULL +-18222 -1969080993 NULL NULL +-6131 -1969080993 -6131 -1969080993 +1499 371855128 NULL NULL +22767 -1969080993 NULL NULL +3556 -1969080993 NULL NULL +NULL NULL -11868 -3536499 +NULL NULL -11868 -915441041 +NULL NULL -11868 1052120431 +NULL NULL -11868 1318114822 +NULL NULL -11868 1456809245 +NULL NULL -11868 1658440922 +NULL NULL -11868 930596435 +NULL NULL -11868 97203778 +NULL NULL -12252 1956403781 +NULL NULL -12252 964377504 +NULL NULL -15212 -2055239583 +NULL NULL -17788 -1361776766 +NULL NULL -17788 -738743861 +NULL NULL -17788 -872691214 +NULL NULL -17788 528419995 +NULL NULL -1787 -63842445 +NULL NULL -20125 -1995259010 +NULL NULL -20900 1078466156 +NULL NULL -22311 -2055239583 +NULL NULL -23457 -63842445 +NULL NULL -2407 1078466156 +NULL NULL -24206 -1456409156 +NULL NULL -24206 641361618 +NULL NULL -26894 -63842445 +NULL NULL -28129 -2055239583 +NULL NULL -28137 -63842445 +NULL NULL -28313 -706104224 +NULL NULL -28313 51228026 +NULL NULL -28313 837320573 +NULL NULL -4117 -1386947816 +NULL NULL -5734 1078466156 +NULL NULL -6061 -586336015 +NULL NULL -7386 -1635102480 +NULL NULL -7386 -2112062470 +NULL NULL -7386 100736776 +NULL NULL -980 -270600267 +NULL NULL -980 -333603940 +NULL NULL -980 -465544127 +NULL NULL -980 -801821285 +NULL NULL -980 1310479628 +NULL NULL -980 2009785365 +NULL NULL -980 356970043 +NULL NULL -980 628784462 +NULL NULL -980 712692345 +NULL NULL 11460 1078466156 +NULL NULL 12089 -63842445 +NULL NULL 13672 -63842445 +NULL NULL 14400 -825652334 +NULL NULL 15061 -63842445 +NULL NULL 15404 1078466156 +NULL NULL 16166 931172175 +NULL NULL 16696 -63842445 +NULL NULL 20156 -1618478138 +NULL NULL 20156 1165375499 +NULL NULL 20156 1855042153 +NULL NULL 20156 963883665 +NULL NULL 20969 -1995259010 +NULL NULL 21186 -586336015 +NULL NULL 22934 -1695419330 +NULL NULL 23015 -1893013623 +NULL NULL 23015 -217613200 +NULL NULL 23015 -252525791 +NULL NULL 23015 -276888585 +NULL NULL 23015 -696928205 +NULL NULL 23015 -893234501 +NULL NULL 23015 258882280 +NULL NULL 23015 564751472 +NULL NULL 26738 -2055239583 +NULL NULL 26944 -1995259010 +NULL NULL 30353 -1007182618 +NULL NULL 30353 -1011627089 +NULL NULL 30353 -1507157031 +NULL NULL 30353 105613996 +NULL NULL 30353 1241923267 +NULL NULL 30353 1364268303 +NULL NULL 30353 2044473567 +NULL NULL 31443 -1968665833 +NULL NULL 3412 -1196037018 +NULL NULL 3412 -1249487623 +NULL NULL 3412 -2081156563 +NULL NULL 3412 -2132472060 +NULL NULL 3412 1253976194 +NULL NULL 3890 1411429004 +NULL NULL 4586 -586336015 +NULL NULL 4779 -1995259010 +NULL NULL 4902 1078466156 +NULL NULL 5957 -1995259010 +NULL NULL 8177 -1995259010 +NULL NULL NULL 1082230084 +NULL NULL NULL NULL +PREHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_big_1a +PREHOOK: Input: default@fullouter_multikey_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON 
b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_big_1a +POSTHOOK: Input: default@fullouter_multikey_small_1a_nonull +#### A masked pattern was here #### +-17582 -1730236061 NULL NULL +-17582 1082230084 NULL NULL +-17582 267529350 -17582 267529350 +-17582 827141667 NULL NULL +-17582 9637312 NULL NULL +-18222 -1969080993 NULL NULL +-6131 -1969080993 -6131 -1969080993 +1499 371855128 NULL NULL +22767 -1969080993 NULL NULL +3556 -1969080993 NULL NULL +3556 NULL NULL NULL +NULL 1082230084 NULL NULL +NULL NULL -11868 -3536499 +NULL NULL -11868 -915441041 +NULL NULL -11868 1052120431 +NULL NULL -11868 1318114822 +NULL NULL -11868 1456809245 +NULL NULL -11868 1658440922 +NULL NULL -11868 930596435 +NULL NULL -11868 97203778 +NULL NULL -12252 1956403781 +NULL NULL -12252 964377504 +NULL NULL -15212 -2055239583 +NULL NULL -17788 -1361776766 +NULL NULL -17788 -738743861 +NULL NULL -17788 -872691214 +NULL NULL -17788 528419995 +NULL NULL -1787 -63842445 +NULL NULL -20125 -1995259010 +NULL NULL -20900 1078466156 +NULL NULL -22311 -2055239583 +NULL NULL -23457 -63842445 +NULL NULL -2407 1078466156 +NULL NULL -24206 -1456409156 +NULL NULL -24206 641361618 +NULL NULL -26894 -63842445 +NULL NULL -28129 -2055239583 +NULL NULL -28137 -63842445 +NULL NULL -28313 -706104224 +NULL NULL -28313 51228026 +NULL NULL -28313 837320573 +NULL NULL -4117 -1386947816 +NULL NULL -5734 1078466156 +NULL NULL -6061 -586336015 +NULL NULL -7386 -1635102480 +NULL NULL -7386 -2112062470 +NULL NULL -7386 100736776 +NULL NULL -980 -270600267 +NULL NULL -980 -333603940 +NULL NULL -980 -465544127 +NULL NULL -980 -801821285 +NULL NULL -980 1310479628 +NULL NULL -980 2009785365 +NULL NULL -980 356970043 +NULL NULL -980 628784462 +NULL NULL -980 712692345 +NULL NULL 11460 1078466156 +NULL NULL 12089 -63842445 +NULL NULL 13672 -63842445 +NULL NULL 14400 -825652334 +NULL NULL 15061 -63842445 +NULL NULL 15404 1078466156 +NULL NULL 16166 931172175 +NULL NULL 16696 -63842445 +NULL NULL 20156 -1618478138 +NULL NULL 20156 1165375499 +NULL NULL 20156 1855042153 +NULL NULL 20156 963883665 +NULL NULL 20969 -1995259010 +NULL NULL 21186 -586336015 +NULL NULL 22934 -1695419330 +NULL NULL 23015 -1893013623 +NULL NULL 23015 -217613200 +NULL NULL 23015 -252525791 +NULL NULL 23015 -276888585 +NULL NULL 23015 -696928205 +NULL NULL 23015 -893234501 +NULL NULL 23015 258882280 +NULL NULL 23015 564751472 +NULL NULL 26738 -2055239583 +NULL NULL 26944 -1995259010 +NULL NULL 30353 -1007182618 +NULL NULL 30353 -1011627089 +NULL NULL 30353 -1507157031 +NULL NULL 30353 105613996 +NULL NULL 30353 1241923267 +NULL NULL 30353 1364268303 +NULL NULL 30353 2044473567 +NULL NULL 31443 -1968665833 +NULL NULL 3412 -1196037018 +NULL NULL 3412 -1249487623 +NULL NULL 3412 -2081156563 +NULL NULL 3412 -2132472060 +NULL NULL 3412 1253976194 +NULL NULL 3890 1411429004 +NULL NULL 4586 -586336015 +NULL NULL 4779 -1995259010 +NULL NULL 4902 1078466156 +NULL NULL 5957 -1995259010 +NULL NULL 8177 -1995259010 +NULL NULL NULL NULL +PREHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_big_1a_nonull +PREHOOK: Input: default@fullouter_multikey_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b 
FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_big_1a_nonull +POSTHOOK: Input: default@fullouter_multikey_small_1a_nonull +#### A masked pattern was here #### +-17582 -1730236061 NULL NULL +-17582 1082230084 NULL NULL +-17582 267529350 -17582 267529350 +-17582 827141667 NULL NULL +-17582 9637312 NULL NULL +-18222 -1969080993 NULL NULL +-6131 -1969080993 -6131 -1969080993 +1499 371855128 NULL NULL +22767 -1969080993 NULL NULL +3556 -1969080993 NULL NULL +NULL NULL -11868 -3536499 +NULL NULL -11868 -915441041 +NULL NULL -11868 1052120431 +NULL NULL -11868 1318114822 +NULL NULL -11868 1456809245 +NULL NULL -11868 1658440922 +NULL NULL -11868 930596435 +NULL NULL -11868 97203778 +NULL NULL -12252 1956403781 +NULL NULL -12252 964377504 +NULL NULL -15212 -2055239583 +NULL NULL -17788 -1361776766 +NULL NULL -17788 -738743861 +NULL NULL -17788 -872691214 +NULL NULL -17788 528419995 +NULL NULL -1787 -63842445 +NULL NULL -20125 -1995259010 +NULL NULL -20900 1078466156 +NULL NULL -22311 -2055239583 +NULL NULL -23457 -63842445 +NULL NULL -2407 1078466156 +NULL NULL -24206 -1456409156 +NULL NULL -24206 641361618 +NULL NULL -26894 -63842445 +NULL NULL -28129 -2055239583 +NULL NULL -28137 -63842445 +NULL NULL -28313 -706104224 +NULL NULL -28313 51228026 +NULL NULL -28313 837320573 +NULL NULL -4117 -1386947816 +NULL NULL -5734 1078466156 +NULL NULL -6061 -586336015 +NULL NULL -7386 -1635102480 +NULL NULL -7386 -2112062470 +NULL NULL -7386 100736776 +NULL NULL -980 -270600267 +NULL NULL -980 -333603940 +NULL NULL -980 -465544127 +NULL NULL -980 -801821285 +NULL NULL -980 1310479628 +NULL NULL -980 2009785365 +NULL NULL -980 356970043 +NULL NULL -980 628784462 +NULL NULL -980 712692345 +NULL NULL 11460 1078466156 +NULL NULL 12089 -63842445 +NULL NULL 13672 -63842445 +NULL NULL 14400 -825652334 +NULL NULL 15061 -63842445 +NULL NULL 15404 1078466156 +NULL NULL 16166 931172175 +NULL NULL 16696 -63842445 +NULL NULL 20156 -1618478138 +NULL NULL 20156 1165375499 +NULL NULL 20156 1855042153 +NULL NULL 20156 963883665 +NULL NULL 20969 -1995259010 +NULL NULL 21186 -586336015 +NULL NULL 22934 -1695419330 +NULL NULL 23015 -1893013623 +NULL NULL 23015 -217613200 +NULL NULL 23015 -252525791 +NULL NULL 23015 -276888585 +NULL NULL 23015 -696928205 +NULL NULL 23015 -893234501 +NULL NULL 23015 258882280 +NULL NULL 23015 564751472 +NULL NULL 26738 -2055239583 +NULL NULL 26944 -1995259010 +NULL NULL 30353 -1007182618 +NULL NULL 30353 -1011627089 +NULL NULL 30353 -1507157031 +NULL NULL 30353 105613996 +NULL NULL 30353 1241923267 +NULL NULL 30353 1364268303 +NULL NULL 30353 2044473567 +NULL NULL 31443 -1968665833 +NULL NULL 3412 -1196037018 +NULL NULL 3412 -1249487623 +NULL NULL 3412 -2081156563 +NULL NULL 3412 -2132472060 +NULL NULL 3412 1253976194 +NULL NULL 3890 1411429004 +NULL NULL 4586 -586336015 +NULL NULL 4779 -1995259010 +NULL NULL 4902 1078466156 +NULL NULL 5957 -1995259010 +NULL NULL 8177 -1995259010 +PREHOOK: query: CREATE TABLE fullouter_multikey_big_1b_txt(key0 timestamp, key1 smallint, key2 string) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_big_1b_txt +POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1b_txt(key0 timestamp, key1 smallint, key2 string) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default 
+POSTHOOK: Output: default@fullouter_multikey_big_1b_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1b_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_multikey_big_1b_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1b_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_multikey_big_1b_txt +PREHOOK: query: CREATE TABLE fullouter_multikey_big_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1b_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_multikey_big_1b_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_big_1b +POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1b_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_multikey_big_1b_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_big_1b +POSTHOOK: Lineage: fullouter_multikey_big_1b.key0 SIMPLE [(fullouter_multikey_big_1b_txt)fullouter_multikey_big_1b_txt.FieldSchema(name:key0, type:timestamp, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_big_1b.key1 SIMPLE [(fullouter_multikey_big_1b_txt)fullouter_multikey_big_1b_txt.FieldSchema(name:key1, type:smallint, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_big_1b.key2 SIMPLE [(fullouter_multikey_big_1b_txt)fullouter_multikey_big_1b_txt.FieldSchema(name:key2, type:string, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_multikey_small_1b_txt(key0 timestamp, key1 smallint, key2 string, s_decimal decimal(38, 18)) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_small_1b_txt +POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1b_txt(key0 timestamp, key1 smallint, key2 string, s_decimal decimal(38, 18)) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_small_1b_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1b_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_multikey_small_1b_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1b_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_multikey_small_1b_txt +PREHOOK: query: CREATE TABLE fullouter_multikey_small_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1b_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_multikey_small_1b_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_small_1b +POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1b_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_multikey_small_1b_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_small_1b +POSTHOOK: Lineage: fullouter_multikey_small_1b.key0 SIMPLE 
[(fullouter_multikey_small_1b_txt)fullouter_multikey_small_1b_txt.FieldSchema(name:key0, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: fullouter_multikey_small_1b.key1 SIMPLE [(fullouter_multikey_small_1b_txt)fullouter_multikey_small_1b_txt.FieldSchema(name:key1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: fullouter_multikey_small_1b.key2 SIMPLE [(fullouter_multikey_small_1b_txt)fullouter_multikey_small_1b_txt.FieldSchema(name:key2, type:string, comment:null), ]
+POSTHOOK: Lineage: fullouter_multikey_small_1b.s_decimal SIMPLE [(fullouter_multikey_small_1b_txt)fullouter_multikey_small_1b_txt.FieldSchema(name:s_decimal, type:decimal(38,18), comment:null), ]
+PREHOOK: query: analyze table fullouter_multikey_big_1b_txt compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_multikey_big_1b_txt
+PREHOOK: Output: default@fullouter_multikey_big_1b_txt
+POSTHOOK: query: analyze table fullouter_multikey_big_1b_txt compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_multikey_big_1b_txt
+POSTHOOK: Output: default@fullouter_multikey_big_1b_txt
+PREHOOK: query: analyze table fullouter_multikey_big_1b_txt compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_multikey_big_1b_txt
+PREHOOK: Output: default@fullouter_multikey_big_1b_txt
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_multikey_big_1b_txt compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_multikey_big_1b_txt
+POSTHOOK: Output: default@fullouter_multikey_big_1b_txt
+#### A masked pattern was here ####
+PREHOOK: query: analyze table fullouter_multikey_small_1b_txt compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_multikey_small_1b_txt
+PREHOOK: Output: default@fullouter_multikey_small_1b_txt
+POSTHOOK: query: analyze table fullouter_multikey_small_1b_txt compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_multikey_small_1b_txt
+POSTHOOK: Output: default@fullouter_multikey_small_1b_txt
+PREHOOK: query: analyze table fullouter_multikey_small_1b_txt compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_multikey_small_1b_txt
+PREHOOK: Output: default@fullouter_multikey_small_1b_txt
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_multikey_small_1b_txt compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_multikey_small_1b_txt
+POSTHOOK: Output: default@fullouter_multikey_small_1b_txt
+#### A masked pattern was here ####
+PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
+SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2
+order by b.key0, b.key1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL
+SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2
+order by b.key0, b.key1
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: b
+                  Statistics: Num rows: 17 Data size: 1729 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                      vectorizationSchemaColumns: [0:key0:timestamp, 1:key1:smallint, 2:key2:string, 3:ROW__ID:struct]
+                  Select Operator
+                    expressions: key0 (type: timestamp), key1 (type: smallint), key2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
+                        projectedOutputColumnNums: [0, 1, 2]
+                    Statistics: Num rows: 17 Data size: 1729 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: timestamp), _col1 (type: smallint), _col2 (type: string)
+                      sort order: +++
+                      Map-reduce partition columns: _col0 (type: timestamp), _col1 (type: smallint), _col2 (type: string)
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkMultiKeyOperator
+                          keyColumns: 0:timestamp, 1:smallint, 2:string
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                      Statistics: Num rows: 17 Data size: 1729 Basic stats: COMPLETE Column stats: NONE
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 3
+                    includeColumns: [0, 1, 2]
+                    dataColumns: key0:timestamp, key1:smallint, key2:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+        Map 2
+            Map Operator Tree:
+                TableScan
+                  alias: s
+                  Statistics: Num rows: 118 Data size: 28216 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                      vectorizationSchemaColumns: [0:key0:timestamp, 1:key1:smallint, 2:key2:string, 3:s_decimal:decimal(38,18), 4:ROW__ID:struct]
+                  Select Operator
+                    expressions: key0 (type: timestamp), key1 (type: smallint), key2 (type: string), s_decimal (type: decimal(38,18))
+                    outputColumnNames: _col0, _col1, _col2, _col3
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
+                        projectedOutputColumnNums: [0, 1, 2, 3]
+                    Statistics: Num rows: 118 Data size: 28216 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: timestamp), _col1 (type: smallint), _col2 (type: string)
+                      sort order: +++
+                      Map-reduce partition columns: _col0 (type: timestamp), _col1 (type: smallint), _col2 (type: string)
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkMultiKeyOperator
+                          keyColumns: 0:timestamp, 1:smallint, 2:string
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                          valueColumns: 3:decimal(38,18)
+                      Statistics: Num rows: 118 Data size: 28216 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col3 (type: decimal(38,18))
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 4
+                    includeColumns: [0, 1, 2, 3]
+                    dataColumns: key0:timestamp, key1:smallint, key2:string, s_decimal:decimal(38,18)
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+        Reducer 3
+            Execution mode: vectorized, llap
+            Reduce Vectorization:
+                enabled: true
+                enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: aaa
+                reduceColumnSortOrder: +++
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 4
+                    dataColumns: KEY.reducesinkkey0:timestamp, KEY.reducesinkkey1:smallint, KEY.reducesinkkey2:string, VALUE._col0:decimal(38,18)
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: [timestamp, bigint, string]
+            Reduce Operator Tree:
+              Map Join Operator
+                condition map:
+                     Full Outer Join 0 to 1
+                keys:
+                  0 KEY.reducesinkkey0 (type: timestamp), KEY.reducesinkkey1 (type: smallint), KEY.reducesinkkey2 (type: string)
+                  1 KEY.reducesinkkey0 (type: timestamp), KEY.reducesinkkey1 (type: smallint), KEY.reducesinkkey2 (type: string)
+                Map Join Vectorization:
+                    bigTableKeyColumns: 0:timestamp, 1:smallint, 2:string
+                    bigTableRetainColumnNums: [0, 1, 2, 3]
+                    bigTableValueColumns: 0:timestamp, 1:smallint, 2:string, 3:decimal(38,18)
+                    className: VectorMapJoinFullOuterMultiKeyOperator
+                    fullOuterSmallTableKeyMapping: 0 -> 4, 1 -> 5, 2 -> 6
+                    native: true
+                    nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Fast Hash Table and No Hybrid Hash Join IS true
+                    projectedOutput: 4:timestamp, 5:smallint, 6:string, 0:timestamp, 1:smallint, 2:string, 3:decimal(38,18)
+                    hashTableImplementationType: FAST
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+                input vertices:
+                  0 Map 1
+                Statistics: Num rows: 129 Data size: 31037 Basic stats: COMPLETE Column stats: NONE
+                DynamicPartitionHashJoin: true
+                Reduce Output Operator
+                  key expressions: _col0 (type: timestamp), _col1 (type: smallint)
+                  sort order: ++
+                  Reduce Sink Vectorization:
+                      className: VectorReduceSinkObjectHashOperator
+                      keyColumns: 4:timestamp, 5:smallint
+                      native: true
+                      nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                      valueColumns: 6:string, 0:timestamp, 1:smallint, 2:string, 3:decimal(38,18)
+                  Statistics: Num rows: 129 Data size: 31037 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col2 (type: string), _col3 (type: timestamp), _col4 (type: smallint), _col5 (type: string), _col6 (type: decimal(38,18))
+        Reducer 4
+            Execution mode: vectorized, llap
+            Reduce Vectorization:
+                enabled: true
+                enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: zz
+                reduceColumnSortOrder: ++
+                allNative: false
+ usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 7 + dataColumns: KEY.reducesinkkey0:timestamp, KEY.reducesinkkey1:smallint, VALUE._col0:string, VALUE._col1:timestamp, VALUE._col2:smallint, VALUE._col3:string, VALUE._col4:decimal(38,18) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: timestamp), KEY.reducesinkkey1 (type: smallint), VALUE._col0 (type: string), VALUE._col1 (type: timestamp), VALUE._col2 (type: smallint), VALUE._col3 (type: string), VALUE._col4 (type: decimal(38,18)) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6] + Statistics: Num rows: 129 Data size: 31037 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 129 Data size: 31037 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2 +order by b.key0, b.key1 +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_big_1b +PREHOOK: Input: default@fullouter_multikey_small_1b +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2 +order by b.key0, b.key1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_big_1b +POSTHOOK: Input: default@fullouter_multikey_small_1b +#### A masked pattern was here #### +2061-12-19 22:10:32.000628309 21635 ANCO NULL NULL NULL NULL +2082-07-14 04:00:40.695380469 12556 NCYBDW NULL NULL NULL NULL +2093-04-10 23:36:54.846 1446 GHZVPWFO NULL NULL NULL NULL +2093-04-10 23:36:54.846 28996 Q NULL NULL NULL NULL +2093-04-10 23:36:54.846 NULL NULL NULL NULL NULL NULL +2188-06-04 15:03:14.963259704 9468 AAA 2188-06-04 15:03:14.963259704 9468 AAA 2.754963520000000000 +2299-11-15 16:41:30.401 -31077 NCYBDW NULL NULL NULL NULL +2306-06-21 11:02:00.143124239 -6909 NCYBDW NULL NULL NULL NULL +2306-06-21 11:02:00.143124239 1446 NULL NULL NULL NULL NULL +2608-02-23 23:44:02.546440891 26184 NCYBDW NULL NULL NULL NULL +2686-05-23 07:46:46.565832918 13212 NCYBDW 2686-05-23 07:46:46.565832918 13212 NCYBDW -917116793.400000000000000000 +2686-05-23 07:46:46.565832918 NULL GHZVPWFO NULL NULL NULL NULL +2898-10-01 22:27:02.000871113 10361 NCYBDW NULL NULL NULL NULL +NULL -6909 NULL NULL NULL NULL NULL +NULL 21635 ANCO NULL NULL NULL NULL +NULL NULL CCWYD NULL NULL NULL NULL +NULL NULL NULL 1905-04-20 13:42:25.000469776 2638 KAUUFF 7.000000000000000000 +NULL NULL NULL 1919-06-20 00:16:50.611028595 20223 ZKBC -23.000000000000000000 +NULL NULL NULL 1931-12-04 11:13:47.269597392 23196 HVJCQMTQL -9697532.899400000000000000 +NULL NULL NULL 1941-10-16 02:19:36.000423663 -24459 AO -821445414.457971200000000000 +NULL 
NULL NULL 1957-02-01 14:00:29.000548421 -16085 ZVEUKC -2312.814900000000000000 +NULL NULL NULL 1957-03-06 09:57:31 -26373 NXLNNSO 2.000000000000000000 +NULL NULL NULL 1980-09-13 19:57:15 NULL M 57650.772300000000000000 +NULL NULL NULL 2018-11-25 22:27:55.84 -12202 VBDBM 7506645.953700000000000000 +NULL NULL NULL 2018-11-25 22:27:55.84 -12202 VBDBM 98790.713907420831000000 +NULL NULL NULL 2018-11-25 22:27:55.84 -22419 LOTLS 342.372604022858400000 +NULL NULL NULL 2038-10-12 09:15:33.000539653 -19598 YKNIAJW -642807895924.660000000000000000 +NULL NULL NULL 2044-05-02 07:00:03.35 -8751 ZSMB -453797242.029791752000000000 +NULL NULL NULL 2071-07-21 20:02:32.000250697 2638 NRUV -66198.351092000000000000 +NULL NULL NULL 2073-03-21 15:32:57.617920888 26425 MPRACIRYW 5.000000000000000000 +NULL NULL NULL 2073-03-21 15:32:57.617920888 26425 MPRACIRYW 726945733.419300000000000000 +NULL NULL NULL 2075-10-25 20:32:40.000792874 NULL NULL 226612651968.360760000000000000 +NULL NULL NULL 2083-06-07 09:35:19.383 -26373 MR -394.086700000000000000 +NULL NULL NULL 2083-06-07 09:35:19.383 -26373 MR 67892053.023760940000000000 +NULL NULL NULL 2086-04-09 00:03:10 20223 THXNJGFFV -85184687349898.892000000000000000 +NULL NULL NULL 2086-04-09 00:03:10 20223 THXNJGFFV 0.439686100000000000 +NULL NULL NULL 2086-04-09 00:03:10 20223 THXNJGFFV 482.538341135921900000 +NULL NULL NULL 2105-01-04 16:27:45 23100 ZSMB -83.232800000000000000 +NULL NULL NULL 2145-10-15 06:58:42.831 2638 NULL -9784.820000000000000000 +NULL NULL NULL 2145-10-15 06:58:42.831 2638 UANGISEXR -5996.306000000000000000 +NULL NULL NULL 2169-04-02 06:30:32 23855 PDVQATOS -1515597428.000000000000000000 +NULL NULL NULL 2169-04-02 06:30:32 23855 PDVQATOS -4016.960800000000000000 +NULL NULL NULL 2201-07-05 17:22:06.084206844 -24459 UBGT 1.506948328200000000 +NULL NULL NULL 2238-05-17 19:27:25.519 20223 KQCM -0.010950000000000000 +NULL NULL NULL 2242-08-04 07:51:46.905 20223 UCYXACQ -0.261490000000000000 +NULL NULL NULL 2242-08-04 07:51:46.905 20223 UCYXACQ 37.728800000000000000 +NULL NULL NULL 2266-09-26 06:27:29.000284762 20223 EDYJJN 14.000000000000000000 +NULL NULL NULL 2301-06-03 17:16:19 15332 ZVEUKC 0.500000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 -13125 JFYW 6.086657000000000000 +NULL NULL NULL 2304-12-15 15:31:16 11101 YJCKKCR -0.200000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 11101 YJCKKCR -0.500000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 11101 YJCKKCR 1279917802.420000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 12587 OPW -4.594895040000000000 +NULL NULL NULL 2304-12-15 15:31:16 1301 T -0.800000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 1301 T 2720.800000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 1301 T 61.302000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 15090 G -4319470286240016.300000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 15090 G 975.000000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 30285 GSJPSIYOU 0.200000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 8650 RLNO -0.435500000000000000 +NULL NULL NULL 2304-12-15 15:31:16 8650 RLNO 0.713517473350000000 +NULL NULL NULL 2309-01-15 12:43:49 22821 ZMY 40.900000000000000000 +NULL NULL NULL 2332-06-14 07:02:42.32 -26373 XFFFDTQ 56845106806308.900000000000000000 +NULL NULL NULL 2333-07-28 09:59:26 23196 RKSK 37872288434740893.500000000000000000 +NULL NULL NULL 2338-02-12 09:30:07 20223 CTH -6154.763054000000000000 +NULL NULL NULL 2340-12-15 05:15:17.133588982 23663 HHTP 33383.800000000000000000 +NULL NULL NULL 2355-09-23 19:52:34.638084141 -19598 H 
74179461.880493000000000000 +NULL NULL NULL 2355-09-23 19:52:34.638084141 -19598 H 92.150000000000000000 +NULL NULL NULL 2357-05-08 07:09:09.000482799 6226 ZSMB -32.460000000000000000 +NULL NULL NULL 2357-05-08 07:09:09.000482799 6226 ZSMB -472.000000000000000000 +NULL NULL NULL 2391-01-17 15:28:37.00045143 16160 ZVEUKC 771355639420297.133000000000000000 +NULL NULL NULL 2396-04-06 15:39:02.404013577 29661 ZSMB -5151598.347000000000000000 +NULL NULL NULL 2396-04-06 15:39:02.404013577 29661 ZSMB 0.767183260000000000 +NULL NULL NULL 2409-09-23 10:33:27 2638 XSXR -162.950000000000000000 +NULL NULL NULL 2409-09-23 10:33:27 2638 XSXR -9926693851.000000000000000000 +NULL NULL NULL 2409-09-23 10:33:27 2638 XSXR 0.400000000000000000 +NULL NULL NULL 2410-05-03 13:44:56 2638 PHOR -769088.176482000000000000 +NULL NULL NULL 2410-05-03 13:44:56 2638 PHOR 93262.914526611000000000 +NULL NULL NULL 2461-03-09 09:54:45.000982385 -16454 ZSMB -9575827.553960000000000000 +NULL NULL NULL 2461-03-09 09:54:45.000982385 -16454 ZSMB -991.436050000000000000 +NULL NULL NULL 2461-03-09 09:54:45.000982385 -16454 ZSMB 8694.890000000000000000 +NULL NULL NULL 2462-12-16 23:11:32.633305644 -26373 CB -582687.000000000000000000 +NULL NULL NULL 2462-12-16 23:11:32.633305644 -26373 CB 67.417990000000000000 +NULL NULL NULL 2467-05-11 06:04:13.426693647 23196 EIBSDASR -8.554888380100000000 +NULL NULL NULL 2480-10-02 09:31:37.000770961 -26373 NBN -5875.519725200000000000 +NULL NULL NULL 2512-10-06 03:03:03 -3465 VZQ -49.512190000000000000 +NULL NULL NULL 2512-10-06 03:03:03 -3465 VZQ 0.445800000000000000 +NULL NULL NULL 2512-10-06 03:03:03 13195 CRJ 14.000000000000000000 +NULL NULL NULL 2512-10-06 03:03:03 1560 X -922.695158410700000000 +NULL NULL NULL 2512-10-06 03:03:03 1560 X 761196.522000000000000000 +NULL NULL NULL 2512-10-06 03:03:03 24313 QBHUG -8423.151573236000000000 +NULL NULL NULL 2512-10-06 03:03:03 32099 ARNZ -0.410000000000000000 +NULL NULL NULL 2525-05-12 15:59:35 -24459 SAVRGA 53106747151.863300000000000000 +NULL NULL NULL 2535-03-01 05:04:49.000525883 23663 ALIQKNXHE -0.166569100000000000 +NULL NULL NULL 2629-04-07 01:54:11 -6776 WGGFVFTW 41.774515077866460000 +NULL NULL NULL 2629-04-07 01:54:11 -6776 WGGFVFTW 6.801285170800000000 +NULL NULL NULL 2637-03-12 22:25:46.385 -12923 PPTJPFR 5.400000000000000000 +NULL NULL NULL 2637-03-12 22:25:46.385 -17786 HYEGQ -84.169614329419000000 +NULL NULL NULL 2637-03-12 22:25:46.385 21841 CXTI 7362887891522.378200000000000000 +NULL NULL NULL 2637-03-12 22:25:46.385 21841 CXTI 749563668434009.650000000000000000 +NULL NULL NULL 2668-06-25 07:12:37.000970744 2638 TJE -2.779682700000000000 +NULL NULL NULL 2688-02-06 20:58:42.000947837 20223 PAIY 67661.735000000000000000 +NULL NULL NULL 2743-12-27 05:16:19.000573579 -12914 ZVEUKC -811984611.517849700000000000 +NULL NULL NULL 2759-11-26 22:19:55.410967136 -27454 ZMY 368.000000000000000000 +NULL NULL NULL 2759-11-26 22:19:55.410967136 -27454 ZMY 60.602579700000000000 +NULL NULL NULL 2808-07-09 02:10:11.928498854 -19598 FHFX 0.300000000000000000 +NULL NULL NULL 2829-06-04 08:01:47.836 22771 ZVEUKC 94317.753180000000000000 +NULL NULL NULL 2861-05-27 07:13:01.000848622 -19598 WKPXNLXS 29399.000000000000000000 +NULL NULL NULL 2882-05-20 07:21:25.221299462 23196 U -4244.926206619000000000 +NULL NULL NULL 2882-05-20 07:21:25.221299462 23196 U -9951044.000000000000000000 +NULL NULL NULL 2888-05-08 08:36:55.182302102 5786 ZVEUKC -56082455.033918000000000000 +NULL NULL NULL 2888-05-08 08:36:55.182302102 5786 ZVEUKC 57.621752577880370000 +NULL 
NULL NULL 2897-08-10 15:21:47.09 23663 XYUVBED 51.732330327300000000 +NULL NULL NULL 2897-08-10 15:21:47.09 23663 XYUVBED 6370.000000000000000000 +NULL NULL NULL 2898-12-18 03:37:17 -24459 MHNBXPBM 14.236693562384810000 +NULL NULL NULL 2913-07-17 15:06:58.041 -10206 NULL -0.200000000000000000 +NULL NULL NULL 2938-12-21 23:35:59.498 29362 ZMY 0.880000000000000000 +NULL NULL NULL 2957-05-07 10:41:46 20223 OWQT -586953.153681000000000000 +NULL NULL NULL 2960-04-12 07:03:42.000366651 20340 CYZYUNSF -96.300000000000000000 +NULL NULL NULL 2960-04-12 07:03:42.000366651 20340 CYZYUNSF 2.157765900000000000 +NULL NULL NULL 2969-01-23 14:08:04.000667259 -18138 VDPN 8924831210.427680190000000000 +NULL NULL NULL 2969-01-23 14:08:04.000667259 -32485 AGEPWWLJF -48431309405.652522000000000000 +NULL NULL NULL 2969-01-23 14:08:04.000667259 -8913 UIMQ -375994644577.315257000000000000 +NULL NULL NULL 2969-01-23 14:08:04.000667259 -8913 UIMQ -81.000000000000000000 +NULL NULL NULL 2969-01-23 14:08:04.000667259 -8913 UIMQ 9.178000000000000000 +NULL NULL NULL 2969-01-23 14:08:04.000667259 14500 WXLTRFQP -23.819800000000000000 +NULL NULL NULL 2969-01-23 14:08:04.000667259 6689 TFGVOGPJF -0.010000000000000000 +NULL NULL NULL 2971-02-14 09:13:19 -16605 BVACIRP -27394351.300000000000000000 +NULL NULL NULL 2971-02-14 09:13:19 -16605 BVACIRP -5.751278023000000000 +NULL NULL NULL NULL -12914 ZVEUKC 221.000000000000000000 +NULL NULL NULL NULL NULL NULL -2.400000000000000000 +NULL NULL NULL NULL NULL NULL -2207.300000000000000000 +NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: CREATE TABLE fullouter_string_big_1a_txt(key string) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_string_big_1a_txt +POSTHOOK: query: CREATE TABLE fullouter_string_big_1a_txt(key string) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_string_big_1a_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_string_big_1a_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_string_big_1a_txt +PREHOOK: query: CREATE TABLE fullouter_string_big_1a STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_string_big_1a_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_string_big_1a +POSTHOOK: query: CREATE TABLE fullouter_string_big_1a STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_string_big_1a_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_string_big_1a +POSTHOOK: Lineage: fullouter_string_big_1a.key SIMPLE [(fullouter_string_big_1a_txt)fullouter_string_big_1a_txt.FieldSchema(name:key, type:string, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_string_big_1a_nonull_txt(key string) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_string_big_1a_nonull_txt +POSTHOOK: query: CREATE 
TABLE fullouter_string_big_1a_nonull_txt(key string) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_string_big_1a_nonull_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_nonull_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_string_big_1a_nonull_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_nonull_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_string_big_1a_nonull_txt +PREHOOK: query: CREATE TABLE fullouter_string_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_nonull_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_string_big_1a_nonull_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_string_big_1a_nonull +POSTHOOK: query: CREATE TABLE fullouter_string_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_nonull_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_string_big_1a_nonull_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_string_big_1a_nonull +POSTHOOK: Lineage: fullouter_string_big_1a_nonull.key SIMPLE [(fullouter_string_big_1a_nonull_txt)fullouter_string_big_1a_nonull_txt.FieldSchema(name:key, type:string, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_string_small_1a_txt(key string, s_date date, s_timestamp timestamp) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_string_small_1a_txt +POSTHOOK: query: CREATE TABLE fullouter_string_small_1a_txt(key string, s_date date, s_timestamp timestamp) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_string_small_1a_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_string_small_1a_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_string_small_1a_txt +PREHOOK: query: CREATE TABLE fullouter_string_small_1a STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_string_small_1a_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_string_small_1a +POSTHOOK: query: CREATE TABLE fullouter_string_small_1a STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_string_small_1a_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_string_small_1a +POSTHOOK: Lineage: fullouter_string_small_1a.key SIMPLE [(fullouter_string_small_1a_txt)fullouter_string_small_1a_txt.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: fullouter_string_small_1a.s_date SIMPLE 
[(fullouter_string_small_1a_txt)fullouter_string_small_1a_txt.FieldSchema(name:s_date, type:date, comment:null), ] +POSTHOOK: Lineage: fullouter_string_small_1a.s_timestamp SIMPLE [(fullouter_string_small_1a_txt)fullouter_string_small_1a_txt.FieldSchema(name:s_timestamp, type:timestamp, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_string_small_1a_nonull_txt(key string, s_date date, s_timestamp timestamp) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_string_small_1a_nonull_txt +POSTHOOK: query: CREATE TABLE fullouter_string_small_1a_nonull_txt(key string, s_date date, s_timestamp timestamp) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_string_small_1a_nonull_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_nonull_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_string_small_1a_nonull_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_nonull_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_string_small_1a_nonull_txt +PREHOOK: query: CREATE TABLE fullouter_string_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_nonull_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_string_small_1a_nonull_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_string_small_1a_nonull +POSTHOOK: query: CREATE TABLE fullouter_string_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_nonull_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_string_small_1a_nonull_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_string_small_1a_nonull +POSTHOOK: Lineage: fullouter_string_small_1a_nonull.key SIMPLE [(fullouter_string_small_1a_nonull_txt)fullouter_string_small_1a_nonull_txt.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: fullouter_string_small_1a_nonull.s_date SIMPLE [(fullouter_string_small_1a_nonull_txt)fullouter_string_small_1a_nonull_txt.FieldSchema(name:s_date, type:date, comment:null), ] +POSTHOOK: Lineage: fullouter_string_small_1a_nonull.s_timestamp SIMPLE [(fullouter_string_small_1a_nonull_txt)fullouter_string_small_1a_nonull_txt.FieldSchema(name:s_timestamp, type:timestamp, comment:null), ] +PREHOOK: query: analyze table fullouter_string_big_1a compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_string_big_1a +PREHOOK: Output: default@fullouter_string_big_1a +POSTHOOK: query: analyze table fullouter_string_big_1a compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_string_big_1a +POSTHOOK: Output: default@fullouter_string_big_1a +PREHOOK: query: analyze table fullouter_string_big_1a compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_string_big_1a +PREHOOK: Output: default@fullouter_string_big_1a +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_string_big_1a compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_string_big_1a +POSTHOOK: Output: 
default@fullouter_string_big_1a +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_string_big_1a_nonull compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_string_big_1a_nonull +PREHOOK: Output: default@fullouter_string_big_1a_nonull +POSTHOOK: query: analyze table fullouter_string_big_1a_nonull compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_string_big_1a_nonull +POSTHOOK: Output: default@fullouter_string_big_1a_nonull +PREHOOK: query: analyze table fullouter_string_big_1a_nonull compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_string_big_1a_nonull +PREHOOK: Output: default@fullouter_string_big_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_string_big_1a_nonull compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_string_big_1a_nonull +POSTHOOK: Output: default@fullouter_string_big_1a_nonull +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_string_small_1a compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_string_small_1a +PREHOOK: Output: default@fullouter_string_small_1a +POSTHOOK: query: analyze table fullouter_string_small_1a compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_string_small_1a +POSTHOOK: Output: default@fullouter_string_small_1a +PREHOOK: query: analyze table fullouter_string_small_1a compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_string_small_1a +PREHOOK: Output: default@fullouter_string_small_1a +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_string_small_1a compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_string_small_1a +POSTHOOK: Output: default@fullouter_string_small_1a +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_string_small_1a_nonull compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_string_small_1a_nonull +PREHOOK: Output: default@fullouter_string_small_1a_nonull +POSTHOOK: query: analyze table fullouter_string_small_1a_nonull compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_string_small_1a_nonull +POSTHOOK: Output: default@fullouter_string_small_1a_nonull +PREHOOK: query: analyze table fullouter_string_small_1a_nonull compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_string_small_1a_nonull +PREHOOK: Output: default@fullouter_string_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_string_small_1a_nonull compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_string_small_1a_nonull +POSTHOOK: Output: default@fullouter_string_small_1a_nonull +#### A masked pattern was here #### +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE 
DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 13 Data size: 1056 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:ROW__ID:struct] + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 13 Data size: 1056 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 13 Data size: 1056 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + includeColumns: [0] + dataColumns: key:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 2 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 38 Data size: 6606 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:s_date:date, 2:s_timestamp:timestamp, 3:ROW__ID:struct] + Select Operator + expressions: key (type: string), s_date (type: date), s_timestamp (type: timestamp) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2] + Statistics: Num rows: 38 Data size: 6606 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:date, 2:timestamp + Statistics: Num rows: 38 Data size: 6606 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: date), _col2 (type: timestamp) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: key:string, s_date:date, s_timestamp:timestamp + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: a + reduceColumnSortOrder: + + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + dataColumns: KEY.reducesinkkey0:string, VALUE._col0:date, VALUE._col1:timestamp + partitionColumnCount: 0 + scratchColumnTypeNames: [string] + Reduce Operator Tree: + Map Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: string) + 1 KEY.reducesinkkey0 (type: string) + Map Join Vectorization: + bigTableKeyColumns: 0:string + bigTableRetainColumnNums: [0, 1, 2] + bigTableValueColumns: 0:string, 1:date, 2:timestamp + className: VectorMapJoinFullOuterStringOperator + fullOuterSmallTableKeyMapping: 0 -> 3 + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Fast Hash Table and No Hybrid Hash Join IS true + projectedOutput: 3:string, 0:string, 1:date, 2:timestamp + hashTableImplementationType: FAST + outputColumnNames: _col0, _col1, _col2, _col3 + input vertices: + 0 Map 1 + Statistics: Num rows: 41 Data size: 7266 Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + keyColumns: 3:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 0:string, 1:date, 2:timestamp + Statistics: Num rows: 41 Data size: 7266 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string), _col2 (type: date), _col3 (type: timestamp) + Reducer 4 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: z + reduceColumnSortOrder: + + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 4 + dataColumns: KEY.reducesinkkey0:string, VALUE._col0:string, VALUE._col1:date, VALUE._col2:timestamp + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string), VALUE._col1 (type: date), VALUE._col2 (type: timestamp) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3] + Statistics: Num rows: 41 Data size: 7266 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + 
className: VectorFileSinkOperator + native: false + Statistics: Num rows: 41 Data size: 7266 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_string_big_1a +PREHOOK: Input: default@fullouter_string_small_1a +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_string_big_1a +POSTHOOK: Input: default@fullouter_string_small_1a +#### A masked pattern was here #### +FTWURVH FTWURVH 1976-03-10 2683-11-22 13:07:04.66673556 +MXGDMBD MXGDMBD 1880-11-01 2765-10-06 13:28:17.000688592 +NULL 1985-01-22 2111-01-10 15:44:28 +NULL 2021-02-21 2802-04-21 18:48:18.5933838 +NULL AARNZRVZQ 2000-11-13 2309-06-05 19:54:13 +NULL AARNZRVZQ 2002-10-23 2525-05-12 15:59:35 +NULL ATZJTPECF 1829-10-16 2357-05-08 07:09:09.000482799 +NULL ATZJTPECF 2217-10-22 2808-10-20 16:01:24.558 +NULL BDBMW 2278-04-27 2101-02-21 08:53:34.692 +NULL BEP 2141-02-19 2521-06-09 01:20:07.121 +NULL BEP 2206-08-10 2331-10-09 10:59:51 +NULL CQMTQLI 2031-09-13 1927-02-13 08:39:25.000919094 +NULL CQMTQLI 2090-11-13 2693-03-17 16:19:55.82 +NULL FROPIK 2023-02-28 2467-05-11 06:04:13.426693647 +NULL FROPIK 2124-10-01 2974-07-06 12:05:08.000146048 +NULL FROPIK 2214-02-09 1949-08-18 17:14:38.000703738 +NULL FYW 1807-03-20 2305-08-17 01:32:44 +NULL GOYJHW 1959-04-27 NULL +NULL GOYJHW 1976-03-06 2805-07-10 10:51:57.00083302 +NULL GOYJHW 1993-04-07 1950-05-04 09:28:22.000114784 +NULL GSJPSIYOU 1948-07-17 2006-09-24 16:01:24.000239251 +NULL IOQIDQBHU 2198-02-08 2073-03-21 15:32:57.617920888 +NULL IWEZJHKE NULL NULL +NULL KL 1980-09-22 2073-08-25 11:51:10.318 +NULL LOTLS 1957-11-09 2092-06-07 06:42:30.000538454 +NULL LOTLS 2099-08-04 2181-01-25 01:04:25.000030055 +NULL LOTLS 2126-09-16 1977-12-15 15:28:56 +NULL NADANUQMW 2037-10-19 2320-04-26 18:50:25.000426922 +NULL NULL 1865-11-08 2893-04-07 07:36:12 +NULL NULL 1915-02-22 2554-10-27 09:34:30 +NULL NULL 2250-04-22 2548-03-21 08:23:13.133573801 +NULL NULL NULL NULL +NULL QTSRKSKB 2144-01-13 2627-12-20 03:38:53.000389266 +NULL SDA 2196-04-12 2462-10-26 19:28:12.733 +NULL VNRXWQ 1883-02-06 2287-07-17 16:46:58.287 +NULL VNRXWQ 2276-11-16 2072-08-16 17:45:47.48349887 +NULL WNGFTTY 1843-06-10 2411-01-28 20:03:59 +NULL WNGFTTY 2251-08-16 2649-12-21 18:30:42.498 +NULL ZNOUDCR NULL 1988-04-23 08:40:21 +PXLD NULL NULL NULL +PXLD NULL NULL NULL +PXLD NULL NULL NULL +QNCYBDW NULL NULL NULL +UA NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +PREHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_string_big_1a_nonull +PREHOOK: Input: default@fullouter_string_small_1a +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date, 
s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_string_big_1a_nonull +POSTHOOK: Input: default@fullouter_string_small_1a +#### A masked pattern was here #### +FTWURVH FTWURVH 1976-03-10 2683-11-22 13:07:04.66673556 +MXGDMBD MXGDMBD 1880-11-01 2765-10-06 13:28:17.000688592 +NULL 1985-01-22 2111-01-10 15:44:28 +NULL 2021-02-21 2802-04-21 18:48:18.5933838 +NULL AARNZRVZQ 2000-11-13 2309-06-05 19:54:13 +NULL AARNZRVZQ 2002-10-23 2525-05-12 15:59:35 +NULL ATZJTPECF 1829-10-16 2357-05-08 07:09:09.000482799 +NULL ATZJTPECF 2217-10-22 2808-10-20 16:01:24.558 +NULL BDBMW 2278-04-27 2101-02-21 08:53:34.692 +NULL BEP 2141-02-19 2521-06-09 01:20:07.121 +NULL BEP 2206-08-10 2331-10-09 10:59:51 +NULL CQMTQLI 2031-09-13 1927-02-13 08:39:25.000919094 +NULL CQMTQLI 2090-11-13 2693-03-17 16:19:55.82 +NULL FROPIK 2023-02-28 2467-05-11 06:04:13.426693647 +NULL FROPIK 2124-10-01 2974-07-06 12:05:08.000146048 +NULL FROPIK 2214-02-09 1949-08-18 17:14:38.000703738 +NULL FYW 1807-03-20 2305-08-17 01:32:44 +NULL GOYJHW 1959-04-27 NULL +NULL GOYJHW 1976-03-06 2805-07-10 10:51:57.00083302 +NULL GOYJHW 1993-04-07 1950-05-04 09:28:22.000114784 +NULL GSJPSIYOU 1948-07-17 2006-09-24 16:01:24.000239251 +NULL IOQIDQBHU 2198-02-08 2073-03-21 15:32:57.617920888 +NULL IWEZJHKE NULL NULL +NULL KL 1980-09-22 2073-08-25 11:51:10.318 +NULL LOTLS 1957-11-09 2092-06-07 06:42:30.000538454 +NULL LOTLS 2099-08-04 2181-01-25 01:04:25.000030055 +NULL LOTLS 2126-09-16 1977-12-15 15:28:56 +NULL NADANUQMW 2037-10-19 2320-04-26 18:50:25.000426922 +NULL NULL 1865-11-08 2893-04-07 07:36:12 +NULL NULL 1915-02-22 2554-10-27 09:34:30 +NULL NULL 2250-04-22 2548-03-21 08:23:13.133573801 +NULL QTSRKSKB 2144-01-13 2627-12-20 03:38:53.000389266 +NULL SDA 2196-04-12 2462-10-26 19:28:12.733 +NULL VNRXWQ 1883-02-06 2287-07-17 16:46:58.287 +NULL VNRXWQ 2276-11-16 2072-08-16 17:45:47.48349887 +NULL WNGFTTY 1843-06-10 2411-01-28 20:03:59 +NULL WNGFTTY 2251-08-16 2649-12-21 18:30:42.498 +NULL ZNOUDCR NULL 1988-04-23 08:40:21 +PXLD NULL NULL NULL +PXLD NULL NULL NULL +PXLD NULL NULL NULL +QNCYBDW NULL NULL NULL +UA NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +PREHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_string_big_1a +PREHOOK: Input: default@fullouter_string_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_string_big_1a +POSTHOOK: Input: default@fullouter_string_small_1a_nonull +#### A masked pattern was here #### +FTWURVH FTWURVH 1976-03-10 2683-11-22 13:07:04.66673556 +MXGDMBD MXGDMBD 1880-11-01 2765-10-06 13:28:17.000688592 +NULL 1985-01-22 2111-01-10 15:44:28 +NULL 2021-02-21 2802-04-21 18:48:18.5933838 +NULL AARNZRVZQ 2000-11-13 2309-06-05 19:54:13 +NULL AARNZRVZQ 2002-10-23 2525-05-12 15:59:35 +NULL ATZJTPECF 1829-10-16 2357-05-08 07:09:09.000482799 +NULL ATZJTPECF 2217-10-22 2808-10-20 16:01:24.558 +NULL BDBMW 2278-04-27 2101-02-21 08:53:34.692 +NULL BEP 2141-02-19 2521-06-09 01:20:07.121 +NULL BEP 2206-08-10 
2331-10-09 10:59:51 +NULL CQMTQLI 2031-09-13 1927-02-13 08:39:25.000919094 +NULL CQMTQLI 2090-11-13 2693-03-17 16:19:55.82 +NULL FROPIK 2023-02-28 2467-05-11 06:04:13.426693647 +NULL FROPIK 2124-10-01 2974-07-06 12:05:08.000146048 +NULL FROPIK 2214-02-09 1949-08-18 17:14:38.000703738 +NULL FYW 1807-03-20 2305-08-17 01:32:44 +NULL GOYJHW 1959-04-27 NULL +NULL GOYJHW 1976-03-06 2805-07-10 10:51:57.00083302 +NULL GOYJHW 1993-04-07 1950-05-04 09:28:22.000114784 +NULL GSJPSIYOU 1948-07-17 2006-09-24 16:01:24.000239251 +NULL IOQIDQBHU 2198-02-08 2073-03-21 15:32:57.617920888 +NULL IWEZJHKE NULL NULL +NULL KL 1980-09-22 2073-08-25 11:51:10.318 +NULL LOTLS 1957-11-09 2092-06-07 06:42:30.000538454 +NULL LOTLS 2099-08-04 2181-01-25 01:04:25.000030055 +NULL LOTLS 2126-09-16 1977-12-15 15:28:56 +NULL NADANUQMW 2037-10-19 2320-04-26 18:50:25.000426922 +NULL NULL NULL NULL +NULL QTSRKSKB 2144-01-13 2627-12-20 03:38:53.000389266 +NULL SDA 2196-04-12 2462-10-26 19:28:12.733 +NULL VNRXWQ 1883-02-06 2287-07-17 16:46:58.287 +NULL VNRXWQ 2276-11-16 2072-08-16 17:45:47.48349887 +NULL WNGFTTY 1843-06-10 2411-01-28 20:03:59 +NULL WNGFTTY 2251-08-16 2649-12-21 18:30:42.498 +NULL ZNOUDCR NULL 1988-04-23 08:40:21 +PXLD NULL NULL NULL +PXLD NULL NULL NULL +PXLD NULL NULL NULL +QNCYBDW NULL NULL NULL +UA NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +PREHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_string_big_1a_nonull +PREHOOK: Input: default@fullouter_string_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_string_big_1a_nonull +POSTHOOK: Input: default@fullouter_string_small_1a_nonull +#### A masked pattern was here #### +FTWURVH FTWURVH 1976-03-10 2683-11-22 13:07:04.66673556 +MXGDMBD MXGDMBD 1880-11-01 2765-10-06 13:28:17.000688592 +NULL 1985-01-22 2111-01-10 15:44:28 +NULL 2021-02-21 2802-04-21 18:48:18.5933838 +NULL AARNZRVZQ 2000-11-13 2309-06-05 19:54:13 +NULL AARNZRVZQ 2002-10-23 2525-05-12 15:59:35 +NULL ATZJTPECF 1829-10-16 2357-05-08 07:09:09.000482799 +NULL ATZJTPECF 2217-10-22 2808-10-20 16:01:24.558 +NULL BDBMW 2278-04-27 2101-02-21 08:53:34.692 +NULL BEP 2141-02-19 2521-06-09 01:20:07.121 +NULL BEP 2206-08-10 2331-10-09 10:59:51 +NULL CQMTQLI 2031-09-13 1927-02-13 08:39:25.000919094 +NULL CQMTQLI 2090-11-13 2693-03-17 16:19:55.82 +NULL FROPIK 2023-02-28 2467-05-11 06:04:13.426693647 +NULL FROPIK 2124-10-01 2974-07-06 12:05:08.000146048 +NULL FROPIK 2214-02-09 1949-08-18 17:14:38.000703738 +NULL FYW 1807-03-20 2305-08-17 01:32:44 +NULL GOYJHW 1959-04-27 NULL +NULL GOYJHW 1976-03-06 2805-07-10 10:51:57.00083302 +NULL GOYJHW 1993-04-07 1950-05-04 09:28:22.000114784 +NULL GSJPSIYOU 1948-07-17 2006-09-24 16:01:24.000239251 +NULL IOQIDQBHU 2198-02-08 2073-03-21 15:32:57.617920888 +NULL IWEZJHKE NULL NULL +NULL KL 1980-09-22 2073-08-25 11:51:10.318 +NULL LOTLS 1957-11-09 2092-06-07 06:42:30.000538454 +NULL LOTLS 2099-08-04 2181-01-25 01:04:25.000030055 +NULL LOTLS 2126-09-16 1977-12-15 15:28:56 +NULL NADANUQMW 2037-10-19 2320-04-26 18:50:25.000426922 +NULL QTSRKSKB 2144-01-13 2627-12-20 03:38:53.000389266 
+NULL SDA 2196-04-12 2462-10-26 19:28:12.733 +NULL VNRXWQ 1883-02-06 2287-07-17 16:46:58.287 +NULL VNRXWQ 2276-11-16 2072-08-16 17:45:47.48349887 +NULL WNGFTTY 1843-06-10 2411-01-28 20:03:59 +NULL WNGFTTY 2251-08-16 2649-12-21 18:30:42.498 +NULL ZNOUDCR NULL 1988-04-23 08:40:21 +PXLD NULL NULL NULL +PXLD NULL NULL NULL +PXLD NULL NULL NULL +QNCYBDW NULL NULL NULL +UA NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL diff --git ql/src/test/results/clientpositive/llap/vector_fullouter_mapjoin_1_optimized.q.out ql/src/test/results/clientpositive/llap/vector_fullouter_mapjoin_1_optimized.q.out new file mode 100644 index 0000000000..da513dbdd6 --- /dev/null +++ ql/src/test/results/clientpositive/llap/vector_fullouter_mapjoin_1_optimized.q.out @@ -0,0 +1,3945 @@ +PREHOOK: query: CREATE TABLE fullouter_long_big_1a_txt(key bigint) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1a_txt +POSTHOOK: query: CREATE TABLE fullouter_long_big_1a_txt(key bigint) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1a_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_big_1a_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_big_1a_txt +PREHOOK: query: CREATE TABLE fullouter_long_big_1a STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_long_big_1a_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1a +POSTHOOK: query: CREATE TABLE fullouter_long_big_1a STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_long_big_1a_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1a +POSTHOOK: Lineage: fullouter_long_big_1a.key SIMPLE [(fullouter_long_big_1a_txt)fullouter_long_big_1a_txt.FieldSchema(name:key, type:bigint, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_long_big_1a_nonull_txt(key bigint) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1a_nonull_txt +POSTHOOK: query: CREATE TABLE fullouter_long_big_1a_nonull_txt(key bigint) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1a_nonull_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_nonull_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_big_1a_nonull_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_nonull_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_big_1a_nonull_txt +PREHOOK: query: 
CREATE TABLE fullouter_long_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_nonull_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_long_big_1a_nonull_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1a_nonull +POSTHOOK: query: CREATE TABLE fullouter_long_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_nonull_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_long_big_1a_nonull_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1a_nonull +POSTHOOK: Lineage: fullouter_long_big_1a_nonull.key SIMPLE [(fullouter_long_big_1a_nonull_txt)fullouter_long_big_1a_nonull_txt.FieldSchema(name:key, type:bigint, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_long_small_1a_txt(key bigint, s_date date) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1a_txt +POSTHOOK: query: CREATE TABLE fullouter_long_small_1a_txt(key bigint, s_date date) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1a_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_small_1a_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_small_1a_txt +PREHOOK: query: CREATE TABLE fullouter_long_small_1a STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_long_small_1a_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1a +POSTHOOK: query: CREATE TABLE fullouter_long_small_1a STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_long_small_1a_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1a +POSTHOOK: Lineage: fullouter_long_small_1a.key SIMPLE [(fullouter_long_small_1a_txt)fullouter_long_small_1a_txt.FieldSchema(name:key, type:bigint, comment:null), ] +POSTHOOK: Lineage: fullouter_long_small_1a.s_date SIMPLE [(fullouter_long_small_1a_txt)fullouter_long_small_1a_txt.FieldSchema(name:s_date, type:date, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_long_small_1a_nonull_txt(key bigint, s_date date) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1a_nonull_txt +POSTHOOK: query: CREATE TABLE fullouter_long_small_1a_nonull_txt(key bigint, s_date date) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1a_nonull_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_nonull_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_small_1a_nonull_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH 
'../../data/files/fullouter_long_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_nonull_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_small_1a_nonull_txt +PREHOOK: query: CREATE TABLE fullouter_long_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_nonull_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_long_small_1a_nonull_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1a_nonull +POSTHOOK: query: CREATE TABLE fullouter_long_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_nonull_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_long_small_1a_nonull_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1a_nonull +POSTHOOK: Lineage: fullouter_long_small_1a_nonull.key SIMPLE [(fullouter_long_small_1a_nonull_txt)fullouter_long_small_1a_nonull_txt.FieldSchema(name:key, type:bigint, comment:null), ] +POSTHOOK: Lineage: fullouter_long_small_1a_nonull.s_date SIMPLE [(fullouter_long_small_1a_nonull_txt)fullouter_long_small_1a_nonull_txt.FieldSchema(name:s_date, type:date, comment:null), ] +PREHOOK: query: analyze table fullouter_long_big_1a compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a +PREHOOK: Output: default@fullouter_long_big_1a +POSTHOOK: query: analyze table fullouter_long_big_1a compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a +POSTHOOK: Output: default@fullouter_long_big_1a +PREHOOK: query: analyze table fullouter_long_big_1a compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_big_1a +PREHOOK: Output: default@fullouter_long_big_1a +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_big_1a compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_big_1a +POSTHOOK: Output: default@fullouter_long_big_1a +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_long_big_1a_nonull compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a_nonull +PREHOOK: Output: default@fullouter_long_big_1a_nonull +POSTHOOK: query: analyze table fullouter_long_big_1a_nonull compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a_nonull +POSTHOOK: Output: default@fullouter_long_big_1a_nonull +PREHOOK: query: analyze table fullouter_long_big_1a_nonull compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_big_1a_nonull +PREHOOK: Output: default@fullouter_long_big_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_big_1a_nonull compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_big_1a_nonull +POSTHOOK: Output: default@fullouter_long_big_1a_nonull +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_long_small_1a compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_small_1a +PREHOOK: Output: default@fullouter_long_small_1a +POSTHOOK: query: analyze table fullouter_long_small_1a compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_small_1a +POSTHOOK: Output: default@fullouter_long_small_1a +PREHOOK: query: analyze table fullouter_long_small_1a compute statistics 
for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_small_1a +PREHOOK: Output: default@fullouter_long_small_1a +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_small_1a compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_small_1a +POSTHOOK: Output: default@fullouter_long_small_1a +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_long_small_1a_nonull compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_small_1a_nonull +PREHOOK: Output: default@fullouter_long_small_1a_nonull +POSTHOOK: query: analyze table fullouter_long_small_1a_nonull compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_small_1a_nonull +POSTHOOK: Output: default@fullouter_long_small_1a_nonull +PREHOOK: query: analyze table fullouter_long_small_1a_nonull compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_small_1a_nonull +PREHOOK: Output: default@fullouter_long_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_small_1a_nonull compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_small_1a_nonull +POSTHOOK: Output: default@fullouter_long_small_1a_nonull +#### A masked pattern was here #### +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:bigint, 1:ROW__ID:struct] + Select Operator + expressions: key (type: bigint) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Map-reduce partition columns: _col0 (type: bigint) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:bigint + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + 
featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + includeColumns: [0] + dataColumns: key:bigint + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 4 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:bigint, 1:s_date:date, 2:ROW__ID:struct] + Select Operator + expressions: key (type: bigint), s_date (type: date) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Map-reduce partition columns: _col0 (type: bigint) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:bigint + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:date + Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: date) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:bigint, s_date:date + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col0 (type: bigint) + 1 _col0 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: date) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: z + reduceColumnSortOrder: + + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + dataColumns: KEY.reducesinkkey0:bigint, VALUE._col0:bigint, VALUE._col1:date + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: bigint), VALUE._col1 (type: date) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2] + Statistics: Num rows: 59 Data size: 3775 Basic 
stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a +PREHOOK: Input: default@fullouter_long_small_1a +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a +POSTHOOK: Input: default@fullouter_long_small_1a +#### A masked pattern was here #### +-5206670856103795573 NULL NULL +-5310365297525168078 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-8460550397108077433 NULL NULL +1569543799237464101 NULL NULL +3313583664488247651 NULL NULL +968819023021777205 NULL NULL +NULL -1339636982994067311 2000-06-20 +NULL -1339636982994067311 2008-12-03 +NULL -2098090254092150988 1817-03-12 +NULL -2098090254092150988 2163-05-26 +NULL -2098090254092150988 2219-12-23 +NULL -2184423060953067642 1853-07-06 +NULL -2184423060953067642 1880-10-06 +NULL -2575185053386712613 1809-07-12 +NULL -2575185053386712613 2105-01-21 +NULL -2688622006344936758 1948-10-15 +NULL -2688622006344936758 2129-01-11 +NULL -327698348664467755 2222-10-15 +NULL -3655445881497026796 2108-08-16 +NULL -4224290881682877258 1813-05-17 +NULL -4224290881682877258 2120-01-16 +NULL -4224290881682877258 2185-07-08 +NULL -4961171400048338491 2196-08-10 +NULL -5706981533666803767 1800-09-20 +NULL -5706981533666803767 2151-06-09 +NULL -5754527700632192146 1958-07-15 +NULL -614848861623872247 2101-05-25 +NULL -614848861623872247 2112-11-09 +NULL -6784441713807772877 1845-02-16 +NULL -6784441713807772877 2054-06-17 +NULL -7707546703881534780 2134-08-20 +NULL 214451696109242839 1855-05-12 +NULL 214451696109242839 1977-01-04 +NULL 214451696109242839 2179-04-18 +NULL 2438535236662373438 1881-09-16 +NULL 2438535236662373438 1916-01-10 +NULL 2438535236662373438 2026-06-23 +NULL 3845554233155411208 1805-11-10 +NULL 3845554233155411208 2264-04-05 +NULL 3873405809071478736 1918-11-20 +NULL 3873405809071478736 2034-06-09 +NULL 3873405809071478736 2164-04-23 +NULL 3905351789241845882 1866-07-28 +NULL 3905351789241845882 2045-12-05 +NULL 434940853096155515 2275-02-08 +NULL 4436884039838843341 2031-05-23 +NULL 5246983111579595707 1817-07-01 +NULL 5246983111579595707 2260-05-11 +NULL 5252407779338300447 2039-03-10 +NULL 5252407779338300447 2042-04-26 +NULL 6049335087268933751 2086-12-17 +NULL 6049335087268933751 2282-06-09 +NULL 7297177530102477725 1921-05-11 +NULL 7297177530102477725 1926-04-12 +NULL 7297177530102477725 2125-08-26 +NULL 7937120928560087303 2083-03-14 +NULL 8755921538765428593 1827-05-01 +NULL NULL 2024-01-23 +NULL NULL 2098-02-10 +NULL NULL 2242-02-08 +NULL NULL NULL +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b 
FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:bigint, 1:ROW__ID:struct] + Select Operator + expressions: key (type: bigint) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Map-reduce partition columns: _col0 (type: bigint) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:bigint + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + includeColumns: [0] + dataColumns: key:bigint + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 2 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:bigint, 1:s_date:date, 2:ROW__ID:struct] + Select Operator + expressions: key (type: bigint), s_date (type: date) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Map-reduce partition columns: _col0 (type: bigint) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:bigint + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:date + Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: date) + 
Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:bigint, s_date:date + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: a + reduceColumnSortOrder: + + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + dataColumns: KEY.reducesinkkey0:bigint, VALUE._col0:date + partitionColumnCount: 0 + scratchColumnTypeNames: [bigint] + Reduce Operator Tree: + Map Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: bigint) + 1 KEY.reducesinkkey0 (type: bigint) + Map Join Vectorization: + bigTableKeyColumns: 0:bigint + bigTableRetainColumnNums: [0, 1] + bigTableValueColumns: 0:bigint, 1:date + className: VectorMapJoinFullOuterLongOperator + fullOuterSmallTableKeyMapping: 0 -> 2 + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + projectedOutput: 2:bigint, 0:bigint, 1:date + hashTableImplementationType: OPTIMIZED + outputColumnNames: _col0, _col1, _col2 + input vertices: + 0 Map 1 + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: bigint) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + keyColumns: 2:bigint + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 0:bigint, 1:date + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: date) + Reducer 4 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: z + reduceColumnSortOrder: + + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + dataColumns: KEY.reducesinkkey0:bigint, VALUE._col0:bigint, VALUE._col1:date + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: bigint), VALUE._col1 (type: date) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2] + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column 
stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a +PREHOOK: Input: default@fullouter_long_small_1a +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a +POSTHOOK: Input: default@fullouter_long_small_1a +#### A masked pattern was here #### +-5206670856103795573 NULL NULL +-5310365297525168078 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-8460550397108077433 NULL NULL +1569543799237464101 NULL NULL +3313583664488247651 NULL NULL +968819023021777205 NULL NULL +NULL -1339636982994067311 2000-06-20 +NULL -1339636982994067311 2008-12-03 +NULL -2098090254092150988 1817-03-12 +NULL -2098090254092150988 2163-05-26 +NULL -2098090254092150988 2219-12-23 +NULL -2184423060953067642 1853-07-06 +NULL -2184423060953067642 1880-10-06 +NULL -2575185053386712613 1809-07-12 +NULL -2575185053386712613 2105-01-21 +NULL -2688622006344936758 1948-10-15 +NULL -2688622006344936758 2129-01-11 +NULL -327698348664467755 2222-10-15 +NULL -3655445881497026796 2108-08-16 +NULL -4224290881682877258 1813-05-17 +NULL -4224290881682877258 2120-01-16 +NULL -4224290881682877258 2185-07-08 +NULL -4961171400048338491 2196-08-10 +NULL -5706981533666803767 1800-09-20 +NULL -5706981533666803767 2151-06-09 +NULL -5754527700632192146 1958-07-15 +NULL -614848861623872247 2101-05-25 +NULL -614848861623872247 2112-11-09 +NULL -6784441713807772877 1845-02-16 +NULL -6784441713807772877 2054-06-17 +NULL -7707546703881534780 2134-08-20 +NULL 214451696109242839 1855-05-12 +NULL 214451696109242839 1977-01-04 +NULL 214451696109242839 2179-04-18 +NULL 2438535236662373438 1881-09-16 +NULL 2438535236662373438 1916-01-10 +NULL 2438535236662373438 2026-06-23 +NULL 3845554233155411208 1805-11-10 +NULL 3845554233155411208 2264-04-05 +NULL 3873405809071478736 1918-11-20 +NULL 3873405809071478736 2034-06-09 +NULL 3873405809071478736 2164-04-23 +NULL 3905351789241845882 1866-07-28 +NULL 3905351789241845882 2045-12-05 +NULL 434940853096155515 2275-02-08 +NULL 4436884039838843341 2031-05-23 +NULL 5246983111579595707 1817-07-01 +NULL 5246983111579595707 2260-05-11 +NULL 5252407779338300447 2039-03-10 +NULL 5252407779338300447 2042-04-26 +NULL 6049335087268933751 2086-12-17 +NULL 6049335087268933751 2282-06-09 +NULL 7297177530102477725 1921-05-11 +NULL 7297177530102477725 1926-04-12 +NULL 7297177530102477725 2125-08-26 +NULL 7937120928560087303 2083-03-14 +NULL 8755921538765428593 1827-05-01 +NULL NULL 2024-01-23 +NULL NULL 2098-02-10 +NULL NULL 2242-02-08 +NULL NULL NULL +PREHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a s ON 
b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a_nonull +PREHOOK: Input: default@fullouter_long_small_1a +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a_nonull +POSTHOOK: Input: default@fullouter_long_small_1a +#### A masked pattern was here #### +-5206670856103795573 NULL NULL +-5310365297525168078 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-8460550397108077433 NULL NULL +1569543799237464101 NULL NULL +3313583664488247651 NULL NULL +968819023021777205 NULL NULL +NULL -1339636982994067311 2000-06-20 +NULL -1339636982994067311 2008-12-03 +NULL -2098090254092150988 1817-03-12 +NULL -2098090254092150988 2163-05-26 +NULL -2098090254092150988 2219-12-23 +NULL -2184423060953067642 1853-07-06 +NULL -2184423060953067642 1880-10-06 +NULL -2575185053386712613 1809-07-12 +NULL -2575185053386712613 2105-01-21 +NULL -2688622006344936758 1948-10-15 +NULL -2688622006344936758 2129-01-11 +NULL -327698348664467755 2222-10-15 +NULL -3655445881497026796 2108-08-16 +NULL -4224290881682877258 1813-05-17 +NULL -4224290881682877258 2120-01-16 +NULL -4224290881682877258 2185-07-08 +NULL -4961171400048338491 2196-08-10 +NULL -5706981533666803767 1800-09-20 +NULL -5706981533666803767 2151-06-09 +NULL -5754527700632192146 1958-07-15 +NULL -614848861623872247 2101-05-25 +NULL -614848861623872247 2112-11-09 +NULL -6784441713807772877 1845-02-16 +NULL -6784441713807772877 2054-06-17 +NULL -7707546703881534780 2134-08-20 +NULL 214451696109242839 1855-05-12 +NULL 214451696109242839 1977-01-04 +NULL 214451696109242839 2179-04-18 +NULL 2438535236662373438 1881-09-16 +NULL 2438535236662373438 1916-01-10 +NULL 2438535236662373438 2026-06-23 +NULL 3845554233155411208 1805-11-10 +NULL 3845554233155411208 2264-04-05 +NULL 3873405809071478736 1918-11-20 +NULL 3873405809071478736 2034-06-09 +NULL 3873405809071478736 2164-04-23 +NULL 3905351789241845882 1866-07-28 +NULL 3905351789241845882 2045-12-05 +NULL 434940853096155515 2275-02-08 +NULL 4436884039838843341 2031-05-23 +NULL 5246983111579595707 1817-07-01 +NULL 5246983111579595707 2260-05-11 +NULL 5252407779338300447 2039-03-10 +NULL 5252407779338300447 2042-04-26 +NULL 6049335087268933751 2086-12-17 +NULL 6049335087268933751 2282-06-09 +NULL 7297177530102477725 1921-05-11 +NULL 7297177530102477725 1926-04-12 +NULL 7297177530102477725 2125-08-26 +NULL 7937120928560087303 2083-03-14 +NULL 8755921538765428593 1827-05-01 +NULL NULL 2024-01-23 +NULL NULL 2098-02-10 +NULL NULL 2242-02-08 +PREHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a +PREHOOK: Input: default@fullouter_long_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a +POSTHOOK: Input: default@fullouter_long_small_1a_nonull +#### A masked pattern was here #### +-5206670856103795573 NULL NULL +-5310365297525168078 NULL NULL +-6187919478609154811 NULL NULL 
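
Editorial aside (not part of the golden output): the EXPLAIN VECTORIZATION DETAIL plan above is the first golden-file evidence of the new vectorized FULL OUTER MapJoin path. The join runs as a VectorMapJoinFullOuterLongOperator inside a reducer (DynamicPartitionHashJoin: true, rather than a map-side broadcast), and the fullOuterSmallTableKeyMapping entry records the small-table key being projected into a scratch output column so that unmatched small-table rows can still be emitted with their own key. A minimal sketch of how to provoke this plan shape follows; the table and column names are illustrative, not the test's own, and the SET list simply mirrors the conditions the plan reports as met plus the flag this patch introduces:

-- Conditions listed in nativeConditionsMet above, plus the new full-outer flag:
SET hive.vectorized.execution.enabled=true;
SET hive.vectorized.execution.mapjoin.native.enabled=true;
SET hive.mapjoin.optimized.hashtable=true;
SET hive.mapjoin.full.outer=true;

-- Any single-column long-family join key (tinyint through bigint) should pick
-- the Long hash-table variant, as the bigint key does in the plan above.
CREATE TABLE big_t (key bigint, s_date date) STORED AS ORC;
CREATE TABLE small_t (key bigint, s_date date) STORED AS ORC;

EXPLAIN VECTORIZATION DETAIL
SELECT b.key, s.key, s.s_date
FROM big_t b
FULL OUTER JOIN small_t s ON b.key = s.key
ORDER BY b.key;

The golden result listing resumes below.
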
+-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-8460550397108077433 NULL NULL +1569543799237464101 NULL NULL +3313583664488247651 NULL NULL +968819023021777205 NULL NULL +NULL -1339636982994067311 2000-06-20 +NULL -1339636982994067311 2008-12-03 +NULL -2098090254092150988 1817-03-12 +NULL -2098090254092150988 2163-05-26 +NULL -2098090254092150988 2219-12-23 +NULL -2184423060953067642 1853-07-06 +NULL -2184423060953067642 1880-10-06 +NULL -2575185053386712613 1809-07-12 +NULL -2575185053386712613 2105-01-21 +NULL -2688622006344936758 1948-10-15 +NULL -2688622006344936758 2129-01-11 +NULL -327698348664467755 2222-10-15 +NULL -3655445881497026796 2108-08-16 +NULL -4224290881682877258 1813-05-17 +NULL -4224290881682877258 2120-01-16 +NULL -4224290881682877258 2185-07-08 +NULL -4961171400048338491 2196-08-10 +NULL -5706981533666803767 1800-09-20 +NULL -5706981533666803767 2151-06-09 +NULL -5754527700632192146 1958-07-15 +NULL -614848861623872247 2101-05-25 +NULL -614848861623872247 2112-11-09 +NULL -6784441713807772877 1845-02-16 +NULL -6784441713807772877 2054-06-17 +NULL -7707546703881534780 2134-08-20 +NULL 214451696109242839 1855-05-12 +NULL 214451696109242839 1977-01-04 +NULL 214451696109242839 2179-04-18 +NULL 2438535236662373438 1881-09-16 +NULL 2438535236662373438 1916-01-10 +NULL 2438535236662373438 2026-06-23 +NULL 3845554233155411208 1805-11-10 +NULL 3845554233155411208 2264-04-05 +NULL 3873405809071478736 1918-11-20 +NULL 3873405809071478736 2034-06-09 +NULL 3873405809071478736 2164-04-23 +NULL 3905351789241845882 1866-07-28 +NULL 3905351789241845882 2045-12-05 +NULL 434940853096155515 2275-02-08 +NULL 4436884039838843341 2031-05-23 +NULL 5246983111579595707 1817-07-01 +NULL 5246983111579595707 2260-05-11 +NULL 5252407779338300447 2039-03-10 +NULL 5252407779338300447 2042-04-26 +NULL 6049335087268933751 2086-12-17 +NULL 6049335087268933751 2282-06-09 +NULL 7297177530102477725 1921-05-11 +NULL 7297177530102477725 1926-04-12 +NULL 7297177530102477725 2125-08-26 +NULL 7937120928560087303 2083-03-14 +NULL 8755921538765428593 1827-05-01 +NULL NULL NULL +PREHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1a_nonull +PREHOOK: Input: default@fullouter_long_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1a_nonull +POSTHOOK: Input: default@fullouter_long_small_1a_nonull +#### A masked pattern was here #### +-5206670856103795573 NULL NULL +-5310365297525168078 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-6187919478609154811 NULL NULL +-8460550397108077433 NULL NULL +1569543799237464101 NULL NULL +3313583664488247651 NULL NULL +968819023021777205 NULL NULL +NULL -1339636982994067311 2000-06-20 +NULL -1339636982994067311 2008-12-03 +NULL -2098090254092150988 1817-03-12 +NULL -2098090254092150988 2163-05-26 +NULL -2098090254092150988 2219-12-23 +NULL -2184423060953067642 1853-07-06 +NULL -2184423060953067642 1880-10-06 +NULL -2575185053386712613 1809-07-12 +NULL -2575185053386712613 2105-01-21 +NULL -2688622006344936758 1948-10-15 +NULL -2688622006344936758 2129-01-11 +NULL 
-327698348664467755 2222-10-15 +NULL -3655445881497026796 2108-08-16 +NULL -4224290881682877258 1813-05-17 +NULL -4224290881682877258 2120-01-16 +NULL -4224290881682877258 2185-07-08 +NULL -4961171400048338491 2196-08-10 +NULL -5706981533666803767 1800-09-20 +NULL -5706981533666803767 2151-06-09 +NULL -5754527700632192146 1958-07-15 +NULL -614848861623872247 2101-05-25 +NULL -614848861623872247 2112-11-09 +NULL -6784441713807772877 1845-02-16 +NULL -6784441713807772877 2054-06-17 +NULL -7707546703881534780 2134-08-20 +NULL 214451696109242839 1855-05-12 +NULL 214451696109242839 1977-01-04 +NULL 214451696109242839 2179-04-18 +NULL 2438535236662373438 1881-09-16 +NULL 2438535236662373438 1916-01-10 +NULL 2438535236662373438 2026-06-23 +NULL 3845554233155411208 1805-11-10 +NULL 3845554233155411208 2264-04-05 +NULL 3873405809071478736 1918-11-20 +NULL 3873405809071478736 2034-06-09 +NULL 3873405809071478736 2164-04-23 +NULL 3905351789241845882 1866-07-28 +NULL 3905351789241845882 2045-12-05 +NULL 434940853096155515 2275-02-08 +NULL 4436884039838843341 2031-05-23 +NULL 5246983111579595707 1817-07-01 +NULL 5246983111579595707 2260-05-11 +NULL 5252407779338300447 2039-03-10 +NULL 5252407779338300447 2042-04-26 +NULL 6049335087268933751 2086-12-17 +NULL 6049335087268933751 2282-06-09 +NULL 7297177530102477725 1921-05-11 +NULL 7297177530102477725 1926-04-12 +NULL 7297177530102477725 2125-08-26 +NULL 7937120928560087303 2083-03-14 +NULL 8755921538765428593 1827-05-01 +PREHOOK: query: CREATE TABLE fullouter_long_big_1b(key smallint) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1b +POSTHOOK: query: CREATE TABLE fullouter_long_big_1b(key smallint) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1b +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1b.txt' OVERWRITE INTO TABLE fullouter_long_big_1b +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_big_1b +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1b.txt' OVERWRITE INTO TABLE fullouter_long_big_1b +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_big_1b +PREHOOK: query: CREATE TABLE fullouter_long_small_1b(key smallint, s_timestamp timestamp) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1b +POSTHOOK: query: CREATE TABLE fullouter_long_small_1b(key smallint, s_timestamp timestamp) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1b +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1b.txt' OVERWRITE INTO TABLE fullouter_long_small_1b +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_small_1b +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1b.txt' OVERWRITE INTO TABLE fullouter_long_small_1b +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_small_1b +PREHOOK: query: analyze table fullouter_long_big_1b compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1b +PREHOOK: Output: 
default@fullouter_long_big_1b +POSTHOOK: query: analyze table fullouter_long_big_1b compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1b +POSTHOOK: Output: default@fullouter_long_big_1b +PREHOOK: query: analyze table fullouter_long_big_1b compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_big_1b +PREHOOK: Output: default@fullouter_long_big_1b +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_big_1b compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_big_1b +POSTHOOK: Output: default@fullouter_long_big_1b +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_long_small_1b compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_small_1b +PREHOOK: Output: default@fullouter_long_small_1b +POSTHOOK: query: analyze table fullouter_long_small_1b compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_small_1b +POSTHOOK: Output: default@fullouter_long_small_1b +PREHOOK: query: analyze table fullouter_long_small_1b compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_small_1b +PREHOOK: Output: default@fullouter_long_small_1b +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_small_1b compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_small_1b +POSTHOOK: Output: default@fullouter_long_small_1b +#### A masked pattern was here #### +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 13 Data size: 63 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:smallint, 1:ROW__ID:struct] + Select Operator + expressions: key (type: smallint) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 13 Data size: 63 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: smallint) + sort order: + + Map-reduce partition columns: _col0 (type: smallint) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:smallint + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 13 Data size: 63 Basic stats: COMPLETE 
Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + includeColumns: [0] + dataColumns: key:smallint + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 2 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 72 Data size: 2208 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:smallint, 1:s_timestamp:timestamp, 2:ROW__ID:struct] + Select Operator + expressions: key (type: smallint), s_timestamp (type: timestamp) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 72 Data size: 2208 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: smallint) + sort order: + + Map-reduce partition columns: _col0 (type: smallint) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:smallint + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:timestamp + Statistics: Num rows: 72 Data size: 2208 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: timestamp) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:smallint, s_timestamp:timestamp + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: a + reduceColumnSortOrder: + + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + dataColumns: KEY.reducesinkkey0:smallint, VALUE._col0:timestamp + partitionColumnCount: 0 + scratchColumnTypeNames: [bigint] + Reduce Operator Tree: + Map Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: smallint) + 1 KEY.reducesinkkey0 (type: smallint) + Map Join Vectorization: + bigTableKeyColumns: 0:smallint + bigTableRetainColumnNums: [0, 1] + bigTableValueColumns: 0:smallint, 1:timestamp + className: VectorMapJoinFullOuterLongOperator + fullOuterSmallTableKeyMapping: 0 -> 2 + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and 
Supports Key Types IS true + projectedOutput: 2:smallint, 0:smallint, 1:timestamp + hashTableImplementationType: OPTIMIZED + outputColumnNames: _col0, _col1, _col2 + input vertices: + 0 Map 1 + Statistics: Num rows: 79 Data size: 2428 Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: smallint) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + keyColumns: 2:smallint + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 0:smallint, 1:timestamp + Statistics: Num rows: 79 Data size: 2428 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: smallint), _col2 (type: timestamp) + Reducer 4 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: z + reduceColumnSortOrder: + + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + dataColumns: KEY.reducesinkkey0:smallint, VALUE._col0:smallint, VALUE._col1:timestamp + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: smallint), VALUE._col0 (type: smallint), VALUE._col1 (type: timestamp) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2] + Statistics: Num rows: 79 Data size: 2428 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 79 Data size: 2428 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1b +PREHOOK: Input: default@fullouter_long_small_1b +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1b +POSTHOOK: Input: default@fullouter_long_small_1b +#### A masked pattern was here #### +-25394 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +32030 32030 2101-09-09 07:35:05.145 +NULL -14172 1918-09-13 11:44:24.496926711 +NULL -14172 2355-01-14 23:23:34 +NULL -14172 2809-06-07 02:10:58 +NULL -15361 2219-09-15 20:15:03.000169887 +NULL -15361 2434-08-13 20:37:07.000172979 +NULL -15427 2023-11-09 19:31:21 +NULL -15427 2046-06-07 22:58:40.728 +NULL -15427 2355-01-08 12:34:11.617 +NULL -19167 2230-12-22 20:25:39.000242111 
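
Editorial aside (again not part of the golden output): SQL equality never matches NULL keys, so a FULL OUTER JOIN passes every NULL-keyed row through exactly once, padded with NULLs from the opposite side. That is why each listing here ends with a run of all-NULL rows, and why those rows sort last: the final order-by reducer reports reduceColumnNullOrder: z, i.e. nulls last, consistent with the patch's nulls-last default ordering. A tiny self-contained illustration, written for this note and assuming a Hive version that accepts FROM-less SELECT:

-- Both sides contribute one NULL-keyed row; the keys do not match each other,
-- so the FULL OUTER JOIN emits two rows, each NULL-padded on the opposite side.
SELECT b.key AS b_key, s.key AS s_key
FROM (SELECT CAST(NULL AS smallint) AS key) b
FULL OUTER JOIN (SELECT CAST(NULL AS smallint) AS key) s
  ON b.key = s.key;
-- expected:
--   NULL   NULL
--   NULL   NULL

The same reasoning explains the trailing NULL rows in the listings above and below: a small-table row whose key is NULL appears with a NULL big-table key, and vice versa. The timestamp result listing resumes below.
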
+NULL -19167 2319-08-26 11:07:11.268 +NULL -20517 2233-12-20 04:06:56.666522799 +NULL -20517 2774-06-23 12:04:06.5 +NULL -20824 2478-11-05 00:28:05 +NULL -22422 1949-03-13 00:07:53.075 +NULL -22422 2337-07-19 06:33:02.000353352 +NULL -22422 2982-12-28 06:30:26.000883228 +NULL -23117 2037-01-05 21:52:30.685952759 +NULL -24775 2035-03-26 08:11:23.375224153 +NULL -24775 2920-08-06 15:58:28.261059449 +NULL -26998 2268-08-04 12:48:11.848006292 +NULL -26998 2428-12-26 07:53:45.96925825 +NULL -26998 2926-07-18 09:02:46.077 +NULL -29600 2333-11-02 15:06:30 +NULL -30059 2269-05-04 21:23:44.000339209 +NULL -30059 2420-12-10 22:12:30 +NULL -30059 2713-10-13 09:28:49 +NULL -30306 2619-05-24 10:35:58.000774018 +NULL -4279 2214-09-10 03:53:06 +NULL -4279 2470-08-12 11:21:14.000955747 +NULL -7373 2662-10-28 12:07:02.000526564 +NULL -7624 2219-12-03 17:07:19 +NULL -7624 2289-08-28 00:14:34 +NULL -7624 2623-03-20 03:18:45.00006465 +NULL -8087 2550-06-26 23:57:42.588007617 +NULL -8087 2923-07-02 11:40:26.115 +NULL -8435 2642-02-07 11:45:04.353231638 +NULL -8435 2834-12-06 16:38:18.901 +NULL -8624 2120-02-15 15:36:40.000758423 +NULL -8624 2282-03-28 07:58:16 +NULL -8624 2644-05-04 04:45:07.839 +NULL 10553 2168-05-05 21:10:59.000152113 +NULL 11232 2038-04-06 14:53:59 +NULL 11232 2507-01-27 22:04:22.49661421 +NULL 11232 2533-11-26 12:22:18 +NULL 13598 2421-05-20 14:18:31.000264698 +NULL 13598 2909-06-25 23:22:50 +NULL 14865 2079-10-06 16:54:35.117 +NULL 14865 2220-02-28 03:41:36 +NULL 14865 2943-03-21 00:42:10.505 +NULL 17125 2236-07-14 01:54:40.927230276 +NULL 17125 2629-11-15 15:34:52 +NULL 21181 2253-03-12 11:55:48.332 +NULL 21181 2434-02-20 00:46:29.633 +NULL 21436 2526-09-22 23:44:55 +NULL 21436 2696-05-08 05:19:24.112 +NULL 24870 2752-12-26 12:32:23.03685163 +NULL 2632 2561-12-15 15:42:27 +NULL 26484 1919-03-04 07:32:37.519 +NULL 26484 2953-03-10 02:05:26.508953676 +NULL 2748 2298-06-20 21:01:24 +NULL 2748 2759-02-13 18:04:36.000307355 +NULL 2748 2862-04-20 13:12:39.482805897 +NULL 29407 2385-12-14 06:03:39.597 +NULL 3198 2223-04-14 13:20:49 +NULL 3198 2428-06-13 16:21:33.955 +NULL 3198 2736-12-20 03:59:50.343550301 +NULL 4510 2293-01-17 13:47:41.00001006 +NULL 4510 2777-03-24 03:44:28.000169723 +NULL NULL 2124-05-07 15:01:19.021 +NULL NULL 2933-06-20 11:48:09.000839488 +NULL NULL 2971-08-07 12:02:11.000948152 +NULL NULL NULL +PREHOOK: query: CREATE TABLE fullouter_long_big_1c(key int, b_string string) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1c +POSTHOOK: query: CREATE TABLE fullouter_long_big_1c(key int, b_string string) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1c +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1c.txt' OVERWRITE INTO TABLE fullouter_long_big_1c +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_big_1c +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1c.txt' OVERWRITE INTO TABLE fullouter_long_big_1c +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_big_1c +PREHOOK: query: CREATE TABLE fullouter_long_small_1c(key int, s_decimal decimal(38, 18)) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1c 
+POSTHOOK: query: CREATE TABLE fullouter_long_small_1c(key int, s_decimal decimal(38, 18)) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1c +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1c.txt' OVERWRITE INTO TABLE fullouter_long_small_1c +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_small_1c +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1c.txt' OVERWRITE INTO TABLE fullouter_long_small_1c +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_small_1c +PREHOOK: query: analyze table fullouter_long_big_1c compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1c +PREHOOK: Output: default@fullouter_long_big_1c +POSTHOOK: query: analyze table fullouter_long_big_1c compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1c +POSTHOOK: Output: default@fullouter_long_big_1c +PREHOOK: query: analyze table fullouter_long_big_1c compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_big_1c +PREHOOK: Output: default@fullouter_long_big_1c +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_big_1c compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_big_1c +POSTHOOK: Output: default@fullouter_long_big_1c +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_long_small_1c compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_small_1c +PREHOOK: Output: default@fullouter_long_small_1c +POSTHOOK: query: analyze table fullouter_long_small_1c compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_small_1c +POSTHOOK: Output: default@fullouter_long_small_1c +PREHOOK: query: analyze table fullouter_long_small_1c compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_small_1c +PREHOOK: Output: default@fullouter_long_small_1c +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_small_1c compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_small_1c +POSTHOOK: Output: default@fullouter_long_small_1c +#### A masked pattern was here #### +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 11 Data size: 173 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + 
native: true + vectorizationSchemaColumns: [0:key:int, 1:b_string:string, 2:ROW__ID:struct] + Select Operator + expressions: key (type: int), b_string (type: string) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 11 Data size: 173 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:string + Statistics: Num rows: 11 Data size: 173 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:int, b_string:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 2 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 81 Data size: 1703 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:int, 1:s_decimal:decimal(38,18), 2:ROW__ID:struct] + Select Operator + expressions: key (type: int), s_decimal (type: decimal(38,18)) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 81 Data size: 1703 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:decimal(38,18) + Statistics: Num rows: 81 Data size: 1703 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: decimal(38,18)) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:int, s_decimal:decimal(38,18) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true 
+ reduceColumnNullOrder: a + reduceColumnSortOrder: + + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + dataColumns: KEY.reducesinkkey0:int, VALUE._col0:decimal(38,18) + partitionColumnCount: 0 + scratchColumnTypeNames: [bigint, string] + Reduce Operator Tree: + Map Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: int) + 1 KEY.reducesinkkey0 (type: int) + Map Join Vectorization: + bigTableKeyColumns: 0:int + bigTableRetainColumnNums: [0, 1] + bigTableValueColumns: 0:int, 1:decimal(38,18) + className: VectorMapJoinFullOuterLongOperator + fullOuterSmallTableKeyMapping: 0 -> 2 + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + projectedOutput: 2:int, 3:string, 0:int, 1:decimal(38,18) + smallTableValueMapping: 3:string + hashTableImplementationType: OPTIMIZED + outputColumnNames: _col0, _col1, _col2, _col3 + input vertices: + 0 Map 1 + Statistics: Num rows: 89 Data size: 1873 Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + keyColumns: 2:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 3:string, 0:int, 1:decimal(38,18) + Statistics: Num rows: 89 Data size: 1873 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string), _col2 (type: int), _col3 (type: decimal(38,18)) + Reducer 4 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: z + reduceColumnSortOrder: + + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 4 + dataColumns: KEY.reducesinkkey0:int, VALUE._col0:string, VALUE._col1:int, VALUE._col2:decimal(38,18) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: int), VALUE._col2 (type: decimal(38,18)) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3] + Statistics: Num rows: 89 Data size: 1873 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 89 Data size: 1873 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key, b.b_string, s.key, s.s_decimal FROM 
fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1c +PREHOOK: Input: default@fullouter_long_small_1c +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1c +POSTHOOK: Input: default@fullouter_long_small_1c +#### A masked pattern was here #### +-1437463633 JU NULL NULL +-1437463633 NULL NULL NULL +-1437463633 SOWDWMS NULL NULL +-1437463633 TKTKGVGFW NULL NULL +-1437463633 YYXPPCH NULL NULL +1725068083 MKSCCE NULL NULL +1928928239 AMKTIWQ NULL NULL +1928928239 NULL NULL NULL +1928928239 NULL NULL NULL +1928928239 VAQHVRI NULL NULL +NULL ABBZ NULL NULL +NULL NULL -1093006502 -69.556658280000000000 +NULL NULL -1197550983 -0.558879692200000000 +NULL NULL -1197550983 0.100000000000000000 +NULL NULL -1197550983 71852.833867441261300000 +NULL NULL -1250662632 -544.554649000000000000 +NULL NULL -1250662632 5454127198.951479000000000000 +NULL NULL -1250662632 93104.000000000000000000 +NULL NULL -1264372462 -6993985240226.000000000000000000 +NULL NULL -1264372462 -899.000000000000000000 +NULL NULL -1264372462 0.883000000000000000 +NULL NULL -1490239076 92253.232096000000000000 +NULL NULL -1681455031 -11105.372477000000000000 +NULL NULL -1681455031 -6.454300000000000000 +NULL NULL -1740848088 -9.157000000000000000 +NULL NULL -1740848088 0.506394259000000000 +NULL NULL -1740848088 901.441000000000000000 +NULL NULL -2048404259 -0.322296044625100000 +NULL NULL -2048404259 3939387044.100000000000000000 +NULL NULL -2123273881 -55.891980000000000000 +NULL NULL -2123273881 3.959000000000000000 +NULL NULL -243940373 -583.258000000000000000 +NULL NULL -243940373 -97176129669.654953000000000000 +NULL NULL -369457052 560.119078830904550000 +NULL NULL -369457052 7.700000000000000000 +NULL NULL -424713789 0.480000000000000000 +NULL NULL -466171792 0.000000000000000000 +NULL NULL -466171792 4227.534400000000000000 +NULL NULL -466171792 69.900000000000000000 +NULL NULL -477147437 6.000000000000000000 +NULL NULL -793950320 -0.100000000000000000 +NULL NULL -793950320 -16.000000000000000000 +NULL NULL -934092157 -7843850349.571300380000000000 +NULL NULL -99948814 -38076694.398100000000000000 +NULL NULL -99948814 -96386.438000000000000000 +NULL NULL 1039864870 0.700000000000000000 +NULL NULL 1039864870 94.040000000000000000 +NULL NULL 1039864870 987601.570000000000000000 +NULL NULL 1091836730 -5017.140000000000000000 +NULL NULL 1091836730 0.020000000000000000 +NULL NULL 1242586043 -4.000000000000000000 +NULL NULL 1242586043 -749975924224.630000000000000000 +NULL NULL 1242586043 71.148500000000000000 +NULL NULL 1479580778 92077343080.700000000000000000 +NULL NULL 150678276 -8278.000000000000000000 +NULL NULL 150678276 15989394.843600000000000000 +NULL NULL 1519948464 152.000000000000000000 +NULL NULL 1561921421 -5.405000000000000000 +NULL NULL 1561921421 53050.550000000000000000 +NULL NULL 1585021913 -5762331.066971120000000000 +NULL NULL 1585021913 607.227470000000000000 +NULL NULL 1585021913 745222.668089540000000000 +NULL NULL 1719049112 -7888197.000000000000000000 +NULL NULL 1738753776 -99817635066320.241600000000000000 +NULL NULL 1738753776 1525.280459649262000000 +NULL NULL 1755897735 -39.965207000000000000 +NULL NULL 1785750809 47443.115000000000000000 +NULL NULL 1801735854 
-1760956929364.267000000000000000 +NULL NULL 1801735854 -438541294.700000000000000000 +NULL NULL 1816559437 -1035.700900000000000000 +NULL NULL 1909136587 -8610.078036935181000000 +NULL NULL 1909136587 181.076815359440000000 +NULL NULL 193709887 -0.566300000000000000 +NULL NULL 193709887 -19889.830000000000000000 +NULL NULL 193709887 0.800000000000000000 +NULL NULL 284554389 5.727146000000000000 +NULL NULL 294598722 -3542.600000000000000000 +NULL NULL 294598722 -9377326244.444000000000000000 +NULL NULL 448130683 -4302.485366846491000000 +NULL NULL 452719211 3020.293893074463600000 +NULL NULL 452719211 83003.437220000000000000 +NULL NULL 466567142 -58810.605860000000000000 +NULL NULL 466567142 -9763217822.129028000000000000 +NULL NULL 466567142 196.578529539858400000 +NULL NULL 560745412 678.250000000000000000 +NULL NULL 698032489 -330457.429262583900000000 +NULL NULL 891262439 -0.040000000000000000 +NULL NULL 90660785 -4564.517185000000000000 +NULL NULL 90660785 12590.288613000000000000 +NULL NULL NULL 1.089120893565337000 +NULL NULL NULL 4.261652270000000000 +NULL NULL NULL 682070836.264960300000000000 +PREHOOK: query: CREATE TABLE fullouter_long_big_1d(key int) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1d +POSTHOOK: query: CREATE TABLE fullouter_long_big_1d(key int) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1d +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1d.txt' OVERWRITE INTO TABLE fullouter_long_big_1d +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_big_1d +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1d.txt' OVERWRITE INTO TABLE fullouter_long_big_1d +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_big_1d +PREHOOK: query: CREATE TABLE fullouter_long_small_1d(key int) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1d +POSTHOOK: query: CREATE TABLE fullouter_long_small_1d(key int) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1d +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1d.txt' OVERWRITE INTO TABLE fullouter_long_small_1d +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_small_1d +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1d.txt' OVERWRITE INTO TABLE fullouter_long_small_1d +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_small_1d +PREHOOK: query: analyze table fullouter_long_big_1d compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1d +PREHOOK: Output: default@fullouter_long_big_1d +POSTHOOK: query: analyze table fullouter_long_big_1d compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1d +POSTHOOK: Output: default@fullouter_long_big_1d +PREHOOK: query: analyze table fullouter_long_big_1d compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_big_1d +PREHOOK: Output: 
default@fullouter_long_big_1d +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_big_1d compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_big_1d +POSTHOOK: Output: default@fullouter_long_big_1d +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_long_small_1d compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_small_1d +PREHOOK: Output: default@fullouter_long_small_1d +POSTHOOK: query: analyze table fullouter_long_small_1d compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_small_1d +POSTHOOK: Output: default@fullouter_long_small_1d +PREHOOK: query: analyze table fullouter_long_small_1d compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_small_1d +PREHOOK: Output: default@fullouter_long_small_1d +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_small_1d compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_small_1d +POSTHOOK: Output: default@fullouter_long_small_1d +#### A masked pattern was here #### +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 12 Data size: 106 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:int, 1:ROW__ID:struct] + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 12 Data size: 106 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 12 Data size: 106 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + includeColumns: [0] + 
dataColumns: key:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 2 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 39 Data size: 381 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:int, 1:ROW__ID:struct] + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 39 Data size: 381 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 39 Data size: 381 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + includeColumns: [0] + dataColumns: key:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: a + reduceColumnSortOrder: + + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + dataColumns: KEY.reducesinkkey0:int + partitionColumnCount: 0 + scratchColumnTypeNames: [bigint] + Reduce Operator Tree: + Map Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: int) + 1 KEY.reducesinkkey0 (type: int) + Map Join Vectorization: + bigTableKeyColumns: 0:int + bigTableRetainColumnNums: [0] + bigTableValueColumns: 0:int + className: VectorMapJoinFullOuterLongOperator + fullOuterSmallTableKeyMapping: 0 -> 1 + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + projectedOutput: 1:int, 0:int + hashTableImplementationType: OPTIMIZED + outputColumnNames: _col0, _col1 + input vertices: + 0 Map 1 + Statistics: Num rows: 42 Data size: 419 Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + keyColumns: 1:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + 
valueColumns: 0:int + Statistics: Num rows: 42 Data size: 419 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: int) + Reducer 4 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: z + reduceColumnSortOrder: + + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + dataColumns: KEY.reducesinkkey0:int, VALUE._col0:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 42 Data size: 419 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 42 Data size: 419 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1d +PREHOOK: Input: default@fullouter_long_small_1d +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1d +POSTHOOK: Input: default@fullouter_long_small_1d +#### A masked pattern was here #### +-1780951928 NULL +-2038654700 -2038654700 +-670834064 NULL +-702028721 NULL +-702028721 NULL +-702028721 NULL +-814597051 NULL +-814597051 NULL +-814597051 NULL +-814597051 NULL +NULL -1003639073 +NULL -1014271154 +NULL -1036083124 +NULL -1210744742 +NULL -1323620496 +NULL -1379355738 +NULL -1712018127 +NULL -1792852276 +NULL -1912571616 +NULL -497171161 +NULL -683339273 +NULL -707688773 +NULL -747044796 +NULL -894799664 +NULL -932176731 +NULL 103640700 +NULL 1164387380 +NULL 1372592319 +NULL 1431997749 +NULL 1614287784 +NULL 162858059 +NULL 1635405412 +NULL 1685473722 +NULL 1780951928 +NULL 1825107160 +NULL 1831520491 +NULL 1840266070 +NULL 1997943409 +NULL 2119085509 +NULL 246169862 +NULL 260588085 +NULL 41376947 +NULL 436878811 +NULL 533298451 +NULL 670834064 +NULL 699007128 +NULL 699863556 +NULL NULL +NULL NULL +NULL NULL +PREHOOK: query: CREATE TABLE fullouter_multikey_big_1a_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_big_1a_txt +POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1a_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_big_1a_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_txt +PREHOOK: type: 
LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_multikey_big_1a_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_multikey_big_1a_txt +PREHOOK: query: CREATE TABLE fullouter_multikey_big_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_multikey_big_1a_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_big_1a +POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_multikey_big_1a_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_big_1a +POSTHOOK: Lineage: fullouter_multikey_big_1a.key0 SIMPLE [(fullouter_multikey_big_1a_txt)fullouter_multikey_big_1a_txt.FieldSchema(name:key0, type:smallint, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_big_1a.key1 SIMPLE [(fullouter_multikey_big_1a_txt)fullouter_multikey_big_1a_txt.FieldSchema(name:key1, type:int, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_multikey_big_1a_nonull_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_big_1a_nonull_txt +POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1a_nonull_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_big_1a_nonull_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_nonull_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_multikey_big_1a_nonull_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_nonull_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_multikey_big_1a_nonull_txt +PREHOOK: query: CREATE TABLE fullouter_multikey_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_nonull_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_multikey_big_1a_nonull_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_big_1a_nonull +POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_nonull_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_multikey_big_1a_nonull_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_big_1a_nonull +POSTHOOK: Lineage: fullouter_multikey_big_1a_nonull.key0 SIMPLE [(fullouter_multikey_big_1a_nonull_txt)fullouter_multikey_big_1a_nonull_txt.FieldSchema(name:key0, type:smallint, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_big_1a_nonull.key1 SIMPLE [(fullouter_multikey_big_1a_nonull_txt)fullouter_multikey_big_1a_nonull_txt.FieldSchema(name:key1, type:int, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_multikey_small_1a_txt(key0 smallint, key1 int) +row format delimited fields terminated 
by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_small_1a_txt +POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1a_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_small_1a_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_multikey_small_1a_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_multikey_small_1a_txt +PREHOOK: query: CREATE TABLE fullouter_multikey_small_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_multikey_small_1a_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_small_1a +POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_multikey_small_1a_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_small_1a +POSTHOOK: Lineage: fullouter_multikey_small_1a.key0 SIMPLE [(fullouter_multikey_small_1a_txt)fullouter_multikey_small_1a_txt.FieldSchema(name:key0, type:smallint, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_small_1a.key1 SIMPLE [(fullouter_multikey_small_1a_txt)fullouter_multikey_small_1a_txt.FieldSchema(name:key1, type:int, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_multikey_small_1a_nonull_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_small_1a_nonull_txt +POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1a_nonull_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_small_1a_nonull_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_nonull_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_multikey_small_1a_nonull_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_nonull_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_multikey_small_1a_nonull_txt +PREHOOK: query: CREATE TABLE fullouter_multikey_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_nonull_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_multikey_small_1a_nonull_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_small_1a_nonull +POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_nonull_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: 
default@fullouter_multikey_small_1a_nonull_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_small_1a_nonull +POSTHOOK: Lineage: fullouter_multikey_small_1a_nonull.key0 SIMPLE [(fullouter_multikey_small_1a_nonull_txt)fullouter_multikey_small_1a_nonull_txt.FieldSchema(name:key0, type:smallint, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_small_1a_nonull.key1 SIMPLE [(fullouter_multikey_small_1a_nonull_txt)fullouter_multikey_small_1a_nonull_txt.FieldSchema(name:key1, type:int, comment:null), ] +PREHOOK: query: analyze table fullouter_multikey_big_1a compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_big_1a +PREHOOK: Output: default@fullouter_multikey_big_1a +POSTHOOK: query: analyze table fullouter_multikey_big_1a compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_big_1a +POSTHOOK: Output: default@fullouter_multikey_big_1a +PREHOOK: query: analyze table fullouter_multikey_big_1a compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_multikey_big_1a +PREHOOK: Output: default@fullouter_multikey_big_1a +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_multikey_big_1a compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_multikey_big_1a +POSTHOOK: Output: default@fullouter_multikey_big_1a +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_multikey_big_1a_nonull compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_big_1a_nonull +PREHOOK: Output: default@fullouter_multikey_big_1a_nonull +POSTHOOK: query: analyze table fullouter_multikey_big_1a_nonull compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_big_1a_nonull +POSTHOOK: Output: default@fullouter_multikey_big_1a_nonull +PREHOOK: query: analyze table fullouter_multikey_big_1a_nonull compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_multikey_big_1a_nonull +PREHOOK: Output: default@fullouter_multikey_big_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_multikey_big_1a_nonull compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_multikey_big_1a_nonull +POSTHOOK: Output: default@fullouter_multikey_big_1a_nonull +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_multikey_small_1a compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_small_1a +PREHOOK: Output: default@fullouter_multikey_small_1a +POSTHOOK: query: analyze table fullouter_multikey_small_1a compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_small_1a +POSTHOOK: Output: default@fullouter_multikey_small_1a +PREHOOK: query: analyze table fullouter_multikey_small_1a compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_multikey_small_1a +PREHOOK: Output: default@fullouter_multikey_small_1a +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_multikey_small_1a compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_multikey_small_1a +POSTHOOK: Output: default@fullouter_multikey_small_1a +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_multikey_small_1a_nonull compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: 
default@fullouter_multikey_small_1a_nonull +PREHOOK: Output: default@fullouter_multikey_small_1a_nonull +POSTHOOK: query: analyze table fullouter_multikey_small_1a_nonull compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_small_1a_nonull +POSTHOOK: Output: default@fullouter_multikey_small_1a_nonull +PREHOOK: query: analyze table fullouter_multikey_small_1a_nonull compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_multikey_small_1a_nonull +PREHOOK: Output: default@fullouter_multikey_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_multikey_small_1a_nonull compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_multikey_small_1a_nonull +POSTHOOK: Output: default@fullouter_multikey_small_1a_nonull +#### A masked pattern was here #### +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 13 Data size: 88 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key0:smallint, 1:key1:int, 2:ROW__ID:struct] + Select Operator + expressions: key0 (type: smallint), key1 (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 13 Data size: 88 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: smallint), _col1 (type: int) + sort order: ++ + Map-reduce partition columns: _col0 (type: smallint), _col1 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + keyColumns: 0:smallint, 1:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 13 Data size: 88 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key0:smallint, key1:int + partitionColumnCount: 0 + 
scratchColumnTypeNames: [] + Map 2 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 92 Data size: 724 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key0:smallint, 1:key1:int, 2:ROW__ID:struct] + Select Operator + expressions: key0 (type: smallint), key1 (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 92 Data size: 724 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: smallint), _col1 (type: int) + sort order: ++ + Map-reduce partition columns: _col0 (type: smallint), _col1 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + keyColumns: 0:smallint, 1:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 92 Data size: 724 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key0:smallint, key1:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: aa + reduceColumnSortOrder: ++ + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + dataColumns: KEY.reducesinkkey0:smallint, KEY.reducesinkkey1:int + partitionColumnCount: 0 + scratchColumnTypeNames: [bigint, bigint] + Reduce Operator Tree: + Map Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: smallint), KEY.reducesinkkey1 (type: int) + 1 KEY.reducesinkkey0 (type: smallint), KEY.reducesinkkey1 (type: int) + Map Join Vectorization: + bigTableKeyColumns: 0:smallint, 1:int + bigTableRetainColumnNums: [0, 1] + bigTableValueColumns: 0:smallint, 1:int + className: VectorMapJoinFullOuterMultiKeyOperator + fullOuterSmallTableKeyMapping: 0 -> 2, 1 -> 3 + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + projectedOutput: 2:smallint, 3:int, 0:smallint, 1:int + hashTableImplementationType: OPTIMIZED + outputColumnNames: _col0, _col1, _col2, _col3 + input vertices: + 0 Map 1 + Statistics: Num rows: 101 Data size: 796 Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: smallint), _col1 (type: int) + sort order: ++ + Reduce Sink Vectorization: + className: 
VectorReduceSinkObjectHashOperator + keyColumns: 2:smallint, 3:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 0:smallint, 1:int + Statistics: Num rows: 101 Data size: 796 Basic stats: COMPLETE Column stats: NONE + value expressions: _col2 (type: smallint), _col3 (type: int) + Reducer 4 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: zz + reduceColumnSortOrder: ++ + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 4 + dataColumns: KEY.reducesinkkey0:smallint, KEY.reducesinkkey1:int, VALUE._col0:smallint, VALUE._col1:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: smallint), KEY.reducesinkkey1 (type: int), VALUE._col0 (type: smallint), VALUE._col1 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3] + Statistics: Num rows: 101 Data size: 796 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 101 Data size: 796 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_big_1a +PREHOOK: Input: default@fullouter_multikey_small_1a +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_big_1a +POSTHOOK: Input: default@fullouter_multikey_small_1a +#### A masked pattern was here #### +-17582 -1730236061 NULL NULL +-17582 1082230084 NULL NULL +-17582 267529350 -17582 267529350 +-17582 827141667 NULL NULL +-17582 9637312 NULL NULL +-18222 -1969080993 NULL NULL +-6131 -1969080993 -6131 -1969080993 +1499 371855128 NULL NULL +22767 -1969080993 NULL NULL +3556 -1969080993 NULL NULL +3556 NULL NULL NULL +NULL 1082230084 NULL NULL +NULL NULL -11868 -3536499 +NULL NULL -11868 -915441041 +NULL NULL -11868 1052120431 +NULL NULL -11868 1318114822 +NULL NULL -11868 1456809245 +NULL NULL -11868 1658440922 +NULL NULL -11868 930596435 +NULL NULL -11868 97203778 +NULL NULL -12252 1956403781 +NULL NULL -12252 964377504 +NULL NULL -15212 -2055239583 +NULL NULL -17788 -1361776766 +NULL NULL -17788 -738743861 +NULL NULL -17788 -872691214 +NULL NULL -17788 528419995 +NULL NULL -1787 -63842445 +NULL NULL -20125 -1995259010 +NULL NULL -20900 
1078466156 +NULL NULL -22311 -2055239583 +NULL NULL -23457 -63842445 +NULL NULL -2407 1078466156 +NULL NULL -24206 -1456409156 +NULL NULL -24206 641361618 +NULL NULL -26894 -63842445 +NULL NULL -28129 -2055239583 +NULL NULL -28137 -63842445 +NULL NULL -28313 -706104224 +NULL NULL -28313 51228026 +NULL NULL -28313 837320573 +NULL NULL -4117 -1386947816 +NULL NULL -5734 1078466156 +NULL NULL -6061 -586336015 +NULL NULL -7386 -1635102480 +NULL NULL -7386 -2112062470 +NULL NULL -7386 100736776 +NULL NULL -980 -270600267 +NULL NULL -980 -333603940 +NULL NULL -980 -465544127 +NULL NULL -980 -801821285 +NULL NULL -980 1310479628 +NULL NULL -980 2009785365 +NULL NULL -980 356970043 +NULL NULL -980 628784462 +NULL NULL -980 712692345 +NULL NULL 11460 1078466156 +NULL NULL 12089 -63842445 +NULL NULL 13672 -63842445 +NULL NULL 14400 -825652334 +NULL NULL 15061 -63842445 +NULL NULL 15404 1078466156 +NULL NULL 16166 931172175 +NULL NULL 16696 -63842445 +NULL NULL 20156 -1618478138 +NULL NULL 20156 1165375499 +NULL NULL 20156 1855042153 +NULL NULL 20156 963883665 +NULL NULL 20969 -1995259010 +NULL NULL 21186 -586336015 +NULL NULL 22934 -1695419330 +NULL NULL 23015 -1893013623 +NULL NULL 23015 -217613200 +NULL NULL 23015 -252525791 +NULL NULL 23015 -276888585 +NULL NULL 23015 -696928205 +NULL NULL 23015 -893234501 +NULL NULL 23015 258882280 +NULL NULL 23015 564751472 +NULL NULL 26738 -2055239583 +NULL NULL 26944 -1995259010 +NULL NULL 30353 -1007182618 +NULL NULL 30353 -1011627089 +NULL NULL 30353 -1507157031 +NULL NULL 30353 105613996 +NULL NULL 30353 1241923267 +NULL NULL 30353 1364268303 +NULL NULL 30353 2044473567 +NULL NULL 31443 -1968665833 +NULL NULL 3412 -1196037018 +NULL NULL 3412 -1249487623 +NULL NULL 3412 -2081156563 +NULL NULL 3412 -2132472060 +NULL NULL 3412 1253976194 +NULL NULL 3890 1411429004 +NULL NULL 4586 -586336015 +NULL NULL 4779 -1995259010 +NULL NULL 4902 1078466156 +NULL NULL 5957 -1995259010 +NULL NULL 8177 -1995259010 +NULL NULL NULL 1082230084 +NULL NULL NULL NULL +NULL NULL NULL NULL +PREHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_big_1a_nonull +PREHOOK: Input: default@fullouter_multikey_small_1a +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_big_1a_nonull +POSTHOOK: Input: default@fullouter_multikey_small_1a +#### A masked pattern was here #### +-17582 -1730236061 NULL NULL +-17582 1082230084 NULL NULL +-17582 267529350 -17582 267529350 +-17582 827141667 NULL NULL +-17582 9637312 NULL NULL +-18222 -1969080993 NULL NULL +-6131 -1969080993 -6131 -1969080993 +1499 371855128 NULL NULL +22767 -1969080993 NULL NULL +3556 -1969080993 NULL NULL +NULL NULL -11868 -3536499 +NULL NULL -11868 -915441041 +NULL NULL -11868 1052120431 +NULL NULL -11868 1318114822 +NULL NULL -11868 1456809245 +NULL NULL -11868 1658440922 +NULL NULL -11868 930596435 +NULL NULL -11868 97203778 +NULL NULL -12252 1956403781 +NULL NULL -12252 964377504 +NULL NULL -15212 -2055239583 +NULL NULL -17788 -1361776766 +NULL NULL -17788 -738743861 +NULL NULL -17788 -872691214 +NULL NULL -17788 528419995 +NULL NULL -1787 -63842445 +NULL NULL 
-20125 -1995259010 +NULL NULL -20900 1078466156 +NULL NULL -22311 -2055239583 +NULL NULL -23457 -63842445 +NULL NULL -2407 1078466156 +NULL NULL -24206 -1456409156 +NULL NULL -24206 641361618 +NULL NULL -26894 -63842445 +NULL NULL -28129 -2055239583 +NULL NULL -28137 -63842445 +NULL NULL -28313 -706104224 +NULL NULL -28313 51228026 +NULL NULL -28313 837320573 +NULL NULL -4117 -1386947816 +NULL NULL -5734 1078466156 +NULL NULL -6061 -586336015 +NULL NULL -7386 -1635102480 +NULL NULL -7386 -2112062470 +NULL NULL -7386 100736776 +NULL NULL -980 -270600267 +NULL NULL -980 -333603940 +NULL NULL -980 -465544127 +NULL NULL -980 -801821285 +NULL NULL -980 1310479628 +NULL NULL -980 2009785365 +NULL NULL -980 356970043 +NULL NULL -980 628784462 +NULL NULL -980 712692345 +NULL NULL 11460 1078466156 +NULL NULL 12089 -63842445 +NULL NULL 13672 -63842445 +NULL NULL 14400 -825652334 +NULL NULL 15061 -63842445 +NULL NULL 15404 1078466156 +NULL NULL 16166 931172175 +NULL NULL 16696 -63842445 +NULL NULL 20156 -1618478138 +NULL NULL 20156 1165375499 +NULL NULL 20156 1855042153 +NULL NULL 20156 963883665 +NULL NULL 20969 -1995259010 +NULL NULL 21186 -586336015 +NULL NULL 22934 -1695419330 +NULL NULL 23015 -1893013623 +NULL NULL 23015 -217613200 +NULL NULL 23015 -252525791 +NULL NULL 23015 -276888585 +NULL NULL 23015 -696928205 +NULL NULL 23015 -893234501 +NULL NULL 23015 258882280 +NULL NULL 23015 564751472 +NULL NULL 26738 -2055239583 +NULL NULL 26944 -1995259010 +NULL NULL 30353 -1007182618 +NULL NULL 30353 -1011627089 +NULL NULL 30353 -1507157031 +NULL NULL 30353 105613996 +NULL NULL 30353 1241923267 +NULL NULL 30353 1364268303 +NULL NULL 30353 2044473567 +NULL NULL 31443 -1968665833 +NULL NULL 3412 -1196037018 +NULL NULL 3412 -1249487623 +NULL NULL 3412 -2081156563 +NULL NULL 3412 -2132472060 +NULL NULL 3412 1253976194 +NULL NULL 3890 1411429004 +NULL NULL 4586 -586336015 +NULL NULL 4779 -1995259010 +NULL NULL 4902 1078466156 +NULL NULL 5957 -1995259010 +NULL NULL 8177 -1995259010 +NULL NULL NULL 1082230084 +NULL NULL NULL NULL +PREHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_big_1a +PREHOOK: Input: default@fullouter_multikey_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_big_1a +POSTHOOK: Input: default@fullouter_multikey_small_1a_nonull +#### A masked pattern was here #### +-17582 -1730236061 NULL NULL +-17582 1082230084 NULL NULL +-17582 267529350 -17582 267529350 +-17582 827141667 NULL NULL +-17582 9637312 NULL NULL +-18222 -1969080993 NULL NULL +-6131 -1969080993 -6131 -1969080993 +1499 371855128 NULL NULL +22767 -1969080993 NULL NULL +3556 -1969080993 NULL NULL +3556 NULL NULL NULL +NULL 1082230084 NULL NULL +NULL NULL -11868 -3536499 +NULL NULL -11868 -915441041 +NULL NULL -11868 1052120431 +NULL NULL -11868 1318114822 +NULL NULL -11868 1456809245 +NULL NULL -11868 1658440922 +NULL NULL -11868 930596435 +NULL NULL -11868 97203778 +NULL NULL -12252 1956403781 +NULL NULL -12252 964377504 +NULL NULL -15212 -2055239583 +NULL NULL -17788 -1361776766 +NULL NULL -17788 -738743861 +NULL NULL -17788 -872691214 
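
[Editor's note — not part of the golden output] The four result-set variants in this file (big/small crossed with the *_nonull tables) all verify the same NULL-key semantics: in a FULL OUTER JOIN, a NULL in any join key column never matches, so such rows survive exactly once with NULLs projected for the opposite side, and the *_nonull inputs simply have no such rows. A minimal sketch of the behavior being checked, using hypothetical table names rather than the test data:

  -- Toy reproduction (assumed names big_t/small_t, not from this patch).
  CREATE TABLE big_t (key0 SMALLINT, key1 INT);
  CREATE TABLE small_t (key0 SMALLINT, key1 INT);
  INSERT INTO big_t VALUES (1, 10), (NULL, 20);
  INSERT INTO small_t VALUES (1, 10), (NULL, 20);

  SELECT b.key0, b.key1, s.key0, s.key1
  FROM big_t b
  FULL OUTER JOIN small_t s
    ON b.key0 = s.key0 AND b.key1 = s.key1;
  -- Expected: (1,10,1,10) joins; each (NULL,20) row is preserved
  -- unmatched, with NULLs filled in for the other side.
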
+NULL NULL -17788 528419995 +NULL NULL -1787 -63842445 +NULL NULL -20125 -1995259010 +NULL NULL -20900 1078466156 +NULL NULL -22311 -2055239583 +NULL NULL -23457 -63842445 +NULL NULL -2407 1078466156 +NULL NULL -24206 -1456409156 +NULL NULL -24206 641361618 +NULL NULL -26894 -63842445 +NULL NULL -28129 -2055239583 +NULL NULL -28137 -63842445 +NULL NULL -28313 -706104224 +NULL NULL -28313 51228026 +NULL NULL -28313 837320573 +NULL NULL -4117 -1386947816 +NULL NULL -5734 1078466156 +NULL NULL -6061 -586336015 +NULL NULL -7386 -1635102480 +NULL NULL -7386 -2112062470 +NULL NULL -7386 100736776 +NULL NULL -980 -270600267 +NULL NULL -980 -333603940 +NULL NULL -980 -465544127 +NULL NULL -980 -801821285 +NULL NULL -980 1310479628 +NULL NULL -980 2009785365 +NULL NULL -980 356970043 +NULL NULL -980 628784462 +NULL NULL -980 712692345 +NULL NULL 11460 1078466156 +NULL NULL 12089 -63842445 +NULL NULL 13672 -63842445 +NULL NULL 14400 -825652334 +NULL NULL 15061 -63842445 +NULL NULL 15404 1078466156 +NULL NULL 16166 931172175 +NULL NULL 16696 -63842445 +NULL NULL 20156 -1618478138 +NULL NULL 20156 1165375499 +NULL NULL 20156 1855042153 +NULL NULL 20156 963883665 +NULL NULL 20969 -1995259010 +NULL NULL 21186 -586336015 +NULL NULL 22934 -1695419330 +NULL NULL 23015 -1893013623 +NULL NULL 23015 -217613200 +NULL NULL 23015 -252525791 +NULL NULL 23015 -276888585 +NULL NULL 23015 -696928205 +NULL NULL 23015 -893234501 +NULL NULL 23015 258882280 +NULL NULL 23015 564751472 +NULL NULL 26738 -2055239583 +NULL NULL 26944 -1995259010 +NULL NULL 30353 -1007182618 +NULL NULL 30353 -1011627089 +NULL NULL 30353 -1507157031 +NULL NULL 30353 105613996 +NULL NULL 30353 1241923267 +NULL NULL 30353 1364268303 +NULL NULL 30353 2044473567 +NULL NULL 31443 -1968665833 +NULL NULL 3412 -1196037018 +NULL NULL 3412 -1249487623 +NULL NULL 3412 -2081156563 +NULL NULL 3412 -2132472060 +NULL NULL 3412 1253976194 +NULL NULL 3890 1411429004 +NULL NULL 4586 -586336015 +NULL NULL 4779 -1995259010 +NULL NULL 4902 1078466156 +NULL NULL 5957 -1995259010 +NULL NULL 8177 -1995259010 +NULL NULL NULL NULL +PREHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_big_1a_nonull +PREHOOK: Input: default@fullouter_multikey_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_big_1a_nonull +POSTHOOK: Input: default@fullouter_multikey_small_1a_nonull +#### A masked pattern was here #### +-17582 -1730236061 NULL NULL +-17582 1082230084 NULL NULL +-17582 267529350 -17582 267529350 +-17582 827141667 NULL NULL +-17582 9637312 NULL NULL +-18222 -1969080993 NULL NULL +-6131 -1969080993 -6131 -1969080993 +1499 371855128 NULL NULL +22767 -1969080993 NULL NULL +3556 -1969080993 NULL NULL +NULL NULL -11868 -3536499 +NULL NULL -11868 -915441041 +NULL NULL -11868 1052120431 +NULL NULL -11868 1318114822 +NULL NULL -11868 1456809245 +NULL NULL -11868 1658440922 +NULL NULL -11868 930596435 +NULL NULL -11868 97203778 +NULL NULL -12252 1956403781 +NULL NULL -12252 964377504 +NULL NULL -15212 -2055239583 +NULL NULL -17788 -1361776766 +NULL NULL -17788 -738743861 +NULL NULL 
-17788 -872691214 +NULL NULL -17788 528419995 +NULL NULL -1787 -63842445 +NULL NULL -20125 -1995259010 +NULL NULL -20900 1078466156 +NULL NULL -22311 -2055239583 +NULL NULL -23457 -63842445 +NULL NULL -2407 1078466156 +NULL NULL -24206 -1456409156 +NULL NULL -24206 641361618 +NULL NULL -26894 -63842445 +NULL NULL -28129 -2055239583 +NULL NULL -28137 -63842445 +NULL NULL -28313 -706104224 +NULL NULL -28313 51228026 +NULL NULL -28313 837320573 +NULL NULL -4117 -1386947816 +NULL NULL -5734 1078466156 +NULL NULL -6061 -586336015 +NULL NULL -7386 -1635102480 +NULL NULL -7386 -2112062470 +NULL NULL -7386 100736776 +NULL NULL -980 -270600267 +NULL NULL -980 -333603940 +NULL NULL -980 -465544127 +NULL NULL -980 -801821285 +NULL NULL -980 1310479628 +NULL NULL -980 2009785365 +NULL NULL -980 356970043 +NULL NULL -980 628784462 +NULL NULL -980 712692345 +NULL NULL 11460 1078466156 +NULL NULL 12089 -63842445 +NULL NULL 13672 -63842445 +NULL NULL 14400 -825652334 +NULL NULL 15061 -63842445 +NULL NULL 15404 1078466156 +NULL NULL 16166 931172175 +NULL NULL 16696 -63842445 +NULL NULL 20156 -1618478138 +NULL NULL 20156 1165375499 +NULL NULL 20156 1855042153 +NULL NULL 20156 963883665 +NULL NULL 20969 -1995259010 +NULL NULL 21186 -586336015 +NULL NULL 22934 -1695419330 +NULL NULL 23015 -1893013623 +NULL NULL 23015 -217613200 +NULL NULL 23015 -252525791 +NULL NULL 23015 -276888585 +NULL NULL 23015 -696928205 +NULL NULL 23015 -893234501 +NULL NULL 23015 258882280 +NULL NULL 23015 564751472 +NULL NULL 26738 -2055239583 +NULL NULL 26944 -1995259010 +NULL NULL 30353 -1007182618 +NULL NULL 30353 -1011627089 +NULL NULL 30353 -1507157031 +NULL NULL 30353 105613996 +NULL NULL 30353 1241923267 +NULL NULL 30353 1364268303 +NULL NULL 30353 2044473567 +NULL NULL 31443 -1968665833 +NULL NULL 3412 -1196037018 +NULL NULL 3412 -1249487623 +NULL NULL 3412 -2081156563 +NULL NULL 3412 -2132472060 +NULL NULL 3412 1253976194 +NULL NULL 3890 1411429004 +NULL NULL 4586 -586336015 +NULL NULL 4779 -1995259010 +NULL NULL 4902 1078466156 +NULL NULL 5957 -1995259010 +NULL NULL 8177 -1995259010 +PREHOOK: query: CREATE TABLE fullouter_multikey_big_1b_txt(key0 timestamp, key1 smallint, key2 string) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_big_1b_txt +POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1b_txt(key0 timestamp, key1 smallint, key2 string) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_big_1b_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1b_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_multikey_big_1b_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1b_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_multikey_big_1b_txt +PREHOOK: query: CREATE TABLE fullouter_multikey_big_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1b_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_multikey_big_1b_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_big_1b +POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1b_txt 
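
[Editor's note — not part of the golden output] Every table in this test follows the same staging pattern: load the comma-delimited file into a *_txt table, CTAS it into ORC so the vectorized ORC reader and the FULL OUTER MapJoin operators are exercised, then compute table and column statistics so the planner can estimate row counts and choose the small (hash-table) side. A condensed sketch of that pattern, with hypothetical names and path standing in for the test's own:

  -- Assumed illustrative names (stage_txt/stage) and path; the real
  -- test uses the fullouter_multikey_* tables and data files above.
  CREATE TABLE stage_txt (key0 TIMESTAMP, key1 SMALLINT, key2 STRING)
  ROW FORMAT DELIMITED FIELDS TERMINATED BY ',';
  LOAD DATA LOCAL INPATH '/path/to/data.txt' OVERWRITE INTO TABLE stage_txt;
  CREATE TABLE stage STORED AS ORC AS SELECT * FROM stage_txt;
  ANALYZE TABLE stage COMPUTE STATISTICS;
  ANALYZE TABLE stage COMPUTE STATISTICS FOR COLUMNS;
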
+POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_multikey_big_1b_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_big_1b +POSTHOOK: Lineage: fullouter_multikey_big_1b.key0 SIMPLE [(fullouter_multikey_big_1b_txt)fullouter_multikey_big_1b_txt.FieldSchema(name:key0, type:timestamp, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_big_1b.key1 SIMPLE [(fullouter_multikey_big_1b_txt)fullouter_multikey_big_1b_txt.FieldSchema(name:key1, type:smallint, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_big_1b.key2 SIMPLE [(fullouter_multikey_big_1b_txt)fullouter_multikey_big_1b_txt.FieldSchema(name:key2, type:string, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_multikey_small_1b_txt(key0 timestamp, key1 smallint, key2 string, s_decimal decimal(38, 18)) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_small_1b_txt +POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1b_txt(key0 timestamp, key1 smallint, key2 string, s_decimal decimal(38, 18)) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_small_1b_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1b_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_multikey_small_1b_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1b_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_multikey_small_1b_txt +PREHOOK: query: CREATE TABLE fullouter_multikey_small_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1b_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_multikey_small_1b_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_small_1b +POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1b_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_multikey_small_1b_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_small_1b +POSTHOOK: Lineage: fullouter_multikey_small_1b.key0 SIMPLE [(fullouter_multikey_small_1b_txt)fullouter_multikey_small_1b_txt.FieldSchema(name:key0, type:timestamp, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_small_1b.key1 SIMPLE [(fullouter_multikey_small_1b_txt)fullouter_multikey_small_1b_txt.FieldSchema(name:key1, type:smallint, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_small_1b.key2 SIMPLE [(fullouter_multikey_small_1b_txt)fullouter_multikey_small_1b_txt.FieldSchema(name:key2, type:string, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_small_1b.s_decimal SIMPLE [(fullouter_multikey_small_1b_txt)fullouter_multikey_small_1b_txt.FieldSchema(name:s_decimal, type:decimal(38,18), comment:null), ] +PREHOOK: query: analyze table fullouter_multikey_big_1b_txt compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_big_1b_txt +PREHOOK: Output: default@fullouter_multikey_big_1b_txt +POSTHOOK: query: analyze table fullouter_multikey_big_1b_txt compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: 
default@fullouter_multikey_big_1b_txt +POSTHOOK: Output: default@fullouter_multikey_big_1b_txt +PREHOOK: query: analyze table fullouter_multikey_big_1b_txt compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_multikey_big_1b_txt +PREHOOK: Output: default@fullouter_multikey_big_1b_txt +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_multikey_big_1b_txt compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_multikey_big_1b_txt +POSTHOOK: Output: default@fullouter_multikey_big_1b_txt +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_multikey_small_1b_txt compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_small_1b_txt +PREHOOK: Output: default@fullouter_multikey_small_1b_txt +POSTHOOK: query: analyze table fullouter_multikey_small_1b_txt compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_small_1b_txt +POSTHOOK: Output: default@fullouter_multikey_small_1b_txt +PREHOOK: query: analyze table fullouter_multikey_small_1b_txt compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_multikey_small_1b_txt +PREHOOK: Output: default@fullouter_multikey_small_1b_txt +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_multikey_small_1b_txt compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_multikey_small_1b_txt +POSTHOOK: Output: default@fullouter_multikey_small_1b_txt +#### A masked pattern was here #### +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2 +order by b.key0, b.key1 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2 +order by b.key0, b.key1 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 17 Data size: 1729 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key0:timestamp, 1:key1:smallint, 2:key2:string, 3:ROW__ID:struct] + Select Operator + expressions: key0 (type: timestamp), key1 (type: smallint), key2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2] + Statistics: Num rows: 17 Data size: 1729 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp), _col1 (type: smallint), _col2 (type: string) + sort order: +++ + Map-reduce partition columns: _col0 (type: timestamp), _col1 (type: smallint), _col2 (type: string) + Reduce Sink Vectorization: + 
className: VectorReduceSinkMultiKeyOperator + keyColumns: 0:timestamp, 1:smallint, 2:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 17 Data size: 1729 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: key0:timestamp, key1:smallint, key2:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 2 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 118 Data size: 28216 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key0:timestamp, 1:key1:smallint, 2:key2:string, 3:s_decimal:decimal(38,18), 4:ROW__ID:struct] + Select Operator + expressions: key0 (type: timestamp), key1 (type: smallint), key2 (type: string), s_decimal (type: decimal(38,18)) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3] + Statistics: Num rows: 118 Data size: 28216 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp), _col1 (type: smallint), _col2 (type: string) + sort order: +++ + Map-reduce partition columns: _col0 (type: timestamp), _col1 (type: smallint), _col2 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + keyColumns: 0:timestamp, 1:smallint, 2:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 3:decimal(38,18) + Statistics: Num rows: 118 Data size: 28216 Basic stats: COMPLETE Column stats: NONE + value expressions: _col3 (type: decimal(38,18)) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 4 + includeColumns: [0, 1, 2, 3] + dataColumns: key0:timestamp, key1:smallint, key2:string, s_decimal:decimal(38,18) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: aaa + reduceColumnSortOrder: +++ + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 4 + dataColumns: KEY.reducesinkkey0:timestamp, KEY.reducesinkkey1:smallint, 
KEY.reducesinkkey2:string, VALUE._col0:decimal(38,18) + partitionColumnCount: 0 + scratchColumnTypeNames: [timestamp, bigint, string] + Reduce Operator Tree: + Map Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: timestamp), KEY.reducesinkkey1 (type: smallint), KEY.reducesinkkey2 (type: string) + 1 KEY.reducesinkkey0 (type: timestamp), KEY.reducesinkkey1 (type: smallint), KEY.reducesinkkey2 (type: string) + Map Join Vectorization: + bigTableKeyColumns: 0:timestamp, 1:smallint, 2:string + bigTableRetainColumnNums: [0, 1, 2, 3] + bigTableValueColumns: 0:timestamp, 1:smallint, 2:string, 3:decimal(38,18) + className: VectorMapJoinFullOuterMultiKeyOperator + fullOuterSmallTableKeyMapping: 0 -> 4, 1 -> 5, 2 -> 6 + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + projectedOutput: 4:timestamp, 5:smallint, 6:string, 0:timestamp, 1:smallint, 2:string, 3:decimal(38,18) + hashTableImplementationType: OPTIMIZED + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + input vertices: + 0 Map 1 + Statistics: Num rows: 129 Data size: 31037 Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: timestamp), _col1 (type: smallint) + sort order: ++ + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + keyColumns: 4:timestamp, 5:smallint + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 6:string, 0:timestamp, 1:smallint, 2:string, 3:decimal(38,18) + Statistics: Num rows: 129 Data size: 31037 Basic stats: COMPLETE Column stats: NONE + value expressions: _col2 (type: string), _col3 (type: timestamp), _col4 (type: smallint), _col5 (type: string), _col6 (type: decimal(38,18)) + Reducer 4 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: zz + reduceColumnSortOrder: ++ + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 7 + dataColumns: KEY.reducesinkkey0:timestamp, KEY.reducesinkkey1:smallint, VALUE._col0:string, VALUE._col1:timestamp, VALUE._col2:smallint, VALUE._col3:string, VALUE._col4:decimal(38,18) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: timestamp), KEY.reducesinkkey1 (type: smallint), VALUE._col0 (type: string), VALUE._col1 (type: timestamp), VALUE._col2 (type: smallint), VALUE._col3 (type: string), VALUE._col4 (type: decimal(38,18)) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6] + Statistics: Num rows: 129 Data size: 31037 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: 
VectorFileSinkOperator + native: false + Statistics: Num rows: 129 Data size: 31037 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2 +order by b.key0, b.key1 +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_big_1b +PREHOOK: Input: default@fullouter_multikey_small_1b +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2 +order by b.key0, b.key1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_big_1b +POSTHOOK: Input: default@fullouter_multikey_small_1b +#### A masked pattern was here #### +2061-12-19 22:10:32.000628309 21635 ANCO NULL NULL NULL NULL +2082-07-14 04:00:40.695380469 12556 NCYBDW NULL NULL NULL NULL +2093-04-10 23:36:54.846 1446 GHZVPWFO NULL NULL NULL NULL +2093-04-10 23:36:54.846 28996 Q NULL NULL NULL NULL +2093-04-10 23:36:54.846 NULL NULL NULL NULL NULL NULL +2188-06-04 15:03:14.963259704 9468 AAA 2188-06-04 15:03:14.963259704 9468 AAA 2.754963520000000000 +2299-11-15 16:41:30.401 -31077 NCYBDW NULL NULL NULL NULL +2306-06-21 11:02:00.143124239 -6909 NCYBDW NULL NULL NULL NULL +2306-06-21 11:02:00.143124239 1446 NULL NULL NULL NULL NULL +2608-02-23 23:44:02.546440891 26184 NCYBDW NULL NULL NULL NULL +2686-05-23 07:46:46.565832918 13212 NCYBDW 2686-05-23 07:46:46.565832918 13212 NCYBDW -917116793.400000000000000000 +2686-05-23 07:46:46.565832918 NULL GHZVPWFO NULL NULL NULL NULL +2898-10-01 22:27:02.000871113 10361 NCYBDW NULL NULL NULL NULL +NULL -6909 NULL NULL NULL NULL NULL +NULL 21635 ANCO NULL NULL NULL NULL +NULL NULL CCWYD NULL NULL NULL NULL +NULL NULL NULL 1905-04-20 13:42:25.000469776 2638 KAUUFF 7.000000000000000000 +NULL NULL NULL 1919-06-20 00:16:50.611028595 20223 ZKBC -23.000000000000000000 +NULL NULL NULL 1931-12-04 11:13:47.269597392 23196 HVJCQMTQL -9697532.899400000000000000 +NULL NULL NULL 1941-10-16 02:19:36.000423663 -24459 AO -821445414.457971200000000000 +NULL NULL NULL 1957-02-01 14:00:29.000548421 -16085 ZVEUKC -2312.814900000000000000 +NULL NULL NULL 1957-03-06 09:57:31 -26373 NXLNNSO 2.000000000000000000 +NULL NULL NULL 1980-09-13 19:57:15 NULL M 57650.772300000000000000 +NULL NULL NULL 2018-11-25 22:27:55.84 -12202 VBDBM 7506645.953700000000000000 +NULL NULL NULL 2018-11-25 22:27:55.84 -12202 VBDBM 98790.713907420831000000 +NULL NULL NULL 2018-11-25 22:27:55.84 -22419 LOTLS 342.372604022858400000 +NULL NULL NULL 2038-10-12 09:15:33.000539653 -19598 YKNIAJW -642807895924.660000000000000000 +NULL NULL NULL 2044-05-02 07:00:03.35 -8751 ZSMB -453797242.029791752000000000 +NULL NULL NULL 2071-07-21 20:02:32.000250697 2638 NRUV -66198.351092000000000000 +NULL NULL NULL 2073-03-21 15:32:57.617920888 26425 MPRACIRYW 5.000000000000000000 +NULL NULL NULL 2073-03-21 15:32:57.617920888 26425 MPRACIRYW 726945733.419300000000000000 +NULL NULL NULL 2075-10-25 20:32:40.000792874 NULL NULL 226612651968.360760000000000000 +NULL NULL 
NULL 2083-06-07 09:35:19.383 -26373 MR -394.086700000000000000 +NULL NULL NULL 2083-06-07 09:35:19.383 -26373 MR 67892053.023760940000000000 +NULL NULL NULL 2086-04-09 00:03:10 20223 THXNJGFFV -85184687349898.892000000000000000 +NULL NULL NULL 2086-04-09 00:03:10 20223 THXNJGFFV 0.439686100000000000 +NULL NULL NULL 2086-04-09 00:03:10 20223 THXNJGFFV 482.538341135921900000 +NULL NULL NULL 2105-01-04 16:27:45 23100 ZSMB -83.232800000000000000 +NULL NULL NULL 2145-10-15 06:58:42.831 2638 NULL -9784.820000000000000000 +NULL NULL NULL 2145-10-15 06:58:42.831 2638 UANGISEXR -5996.306000000000000000 +NULL NULL NULL 2169-04-02 06:30:32 23855 PDVQATOS -1515597428.000000000000000000 +NULL NULL NULL 2169-04-02 06:30:32 23855 PDVQATOS -4016.960800000000000000 +NULL NULL NULL 2201-07-05 17:22:06.084206844 -24459 UBGT 1.506948328200000000 +NULL NULL NULL 2238-05-17 19:27:25.519 20223 KQCM -0.010950000000000000 +NULL NULL NULL 2242-08-04 07:51:46.905 20223 UCYXACQ -0.261490000000000000 +NULL NULL NULL 2242-08-04 07:51:46.905 20223 UCYXACQ 37.728800000000000000 +NULL NULL NULL 2266-09-26 06:27:29.000284762 20223 EDYJJN 14.000000000000000000 +NULL NULL NULL 2301-06-03 17:16:19 15332 ZVEUKC 0.500000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 -13125 JFYW 6.086657000000000000 +NULL NULL NULL 2304-12-15 15:31:16 11101 YJCKKCR -0.200000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 11101 YJCKKCR -0.500000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 11101 YJCKKCR 1279917802.420000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 12587 OPW -4.594895040000000000 +NULL NULL NULL 2304-12-15 15:31:16 1301 T -0.800000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 1301 T 2720.800000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 1301 T 61.302000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 15090 G -4319470286240016.300000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 15090 G 975.000000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 30285 GSJPSIYOU 0.200000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 8650 RLNO -0.435500000000000000 +NULL NULL NULL 2304-12-15 15:31:16 8650 RLNO 0.713517473350000000 +NULL NULL NULL 2309-01-15 12:43:49 22821 ZMY 40.900000000000000000 +NULL NULL NULL 2332-06-14 07:02:42.32 -26373 XFFFDTQ 56845106806308.900000000000000000 +NULL NULL NULL 2333-07-28 09:59:26 23196 RKSK 37872288434740893.500000000000000000 +NULL NULL NULL 2338-02-12 09:30:07 20223 CTH -6154.763054000000000000 +NULL NULL NULL 2340-12-15 05:15:17.133588982 23663 HHTP 33383.800000000000000000 +NULL NULL NULL 2355-09-23 19:52:34.638084141 -19598 H 74179461.880493000000000000 +NULL NULL NULL 2355-09-23 19:52:34.638084141 -19598 H 92.150000000000000000 +NULL NULL NULL 2357-05-08 07:09:09.000482799 6226 ZSMB -32.460000000000000000 +NULL NULL NULL 2357-05-08 07:09:09.000482799 6226 ZSMB -472.000000000000000000 +NULL NULL NULL 2391-01-17 15:28:37.00045143 16160 ZVEUKC 771355639420297.133000000000000000 +NULL NULL NULL 2396-04-06 15:39:02.404013577 29661 ZSMB -5151598.347000000000000000 +NULL NULL NULL 2396-04-06 15:39:02.404013577 29661 ZSMB 0.767183260000000000 +NULL NULL NULL 2409-09-23 10:33:27 2638 XSXR -162.950000000000000000 +NULL NULL NULL 2409-09-23 10:33:27 2638 XSXR -9926693851.000000000000000000 +NULL NULL NULL 2409-09-23 10:33:27 2638 XSXR 0.400000000000000000 +NULL NULL NULL 2410-05-03 13:44:56 2638 PHOR -769088.176482000000000000 +NULL NULL NULL 2410-05-03 13:44:56 2638 PHOR 93262.914526611000000000 +NULL NULL NULL 2461-03-09 09:54:45.000982385 -16454 ZSMB -9575827.553960000000000000 +NULL NULL 
NULL 2461-03-09 09:54:45.000982385 -16454 ZSMB -991.436050000000000000 +NULL NULL NULL 2461-03-09 09:54:45.000982385 -16454 ZSMB 8694.890000000000000000 +NULL NULL NULL 2462-12-16 23:11:32.633305644 -26373 CB -582687.000000000000000000 +NULL NULL NULL 2462-12-16 23:11:32.633305644 -26373 CB 67.417990000000000000 +NULL NULL NULL 2467-05-11 06:04:13.426693647 23196 EIBSDASR -8.554888380100000000 +NULL NULL NULL 2480-10-02 09:31:37.000770961 -26373 NBN -5875.519725200000000000 +NULL NULL NULL 2512-10-06 03:03:03 -3465 VZQ -49.512190000000000000 +NULL NULL NULL 2512-10-06 03:03:03 -3465 VZQ 0.445800000000000000 +NULL NULL NULL 2512-10-06 03:03:03 13195 CRJ 14.000000000000000000 +NULL NULL NULL 2512-10-06 03:03:03 1560 X -922.695158410700000000 +NULL NULL NULL 2512-10-06 03:03:03 1560 X 761196.522000000000000000 +NULL NULL NULL 2512-10-06 03:03:03 24313 QBHUG -8423.151573236000000000 +NULL NULL NULL 2512-10-06 03:03:03 32099 ARNZ -0.410000000000000000 +NULL NULL NULL 2525-05-12 15:59:35 -24459 SAVRGA 53106747151.863300000000000000 +NULL NULL NULL 2535-03-01 05:04:49.000525883 23663 ALIQKNXHE -0.166569100000000000 +NULL NULL NULL 2629-04-07 01:54:11 -6776 WGGFVFTW 41.774515077866460000 +NULL NULL NULL 2629-04-07 01:54:11 -6776 WGGFVFTW 6.801285170800000000 +NULL NULL NULL 2637-03-12 22:25:46.385 -12923 PPTJPFR 5.400000000000000000 +NULL NULL NULL 2637-03-12 22:25:46.385 -17786 HYEGQ -84.169614329419000000 +NULL NULL NULL 2637-03-12 22:25:46.385 21841 CXTI 7362887891522.378200000000000000 +NULL NULL NULL 2637-03-12 22:25:46.385 21841 CXTI 749563668434009.650000000000000000 +NULL NULL NULL 2668-06-25 07:12:37.000970744 2638 TJE -2.779682700000000000 +NULL NULL NULL 2688-02-06 20:58:42.000947837 20223 PAIY 67661.735000000000000000 +NULL NULL NULL 2743-12-27 05:16:19.000573579 -12914 ZVEUKC -811984611.517849700000000000 +NULL NULL NULL 2759-11-26 22:19:55.410967136 -27454 ZMY 368.000000000000000000 +NULL NULL NULL 2759-11-26 22:19:55.410967136 -27454 ZMY 60.602579700000000000 +NULL NULL NULL 2808-07-09 02:10:11.928498854 -19598 FHFX 0.300000000000000000 +NULL NULL NULL 2829-06-04 08:01:47.836 22771 ZVEUKC 94317.753180000000000000 +NULL NULL NULL 2861-05-27 07:13:01.000848622 -19598 WKPXNLXS 29399.000000000000000000 +NULL NULL NULL 2882-05-20 07:21:25.221299462 23196 U -4244.926206619000000000 +NULL NULL NULL 2882-05-20 07:21:25.221299462 23196 U -9951044.000000000000000000 +NULL NULL NULL 2888-05-08 08:36:55.182302102 5786 ZVEUKC -56082455.033918000000000000 +NULL NULL NULL 2888-05-08 08:36:55.182302102 5786 ZVEUKC 57.621752577880370000 +NULL NULL NULL 2897-08-10 15:21:47.09 23663 XYUVBED 51.732330327300000000 +NULL NULL NULL 2897-08-10 15:21:47.09 23663 XYUVBED 6370.000000000000000000 +NULL NULL NULL 2898-12-18 03:37:17 -24459 MHNBXPBM 14.236693562384810000 +NULL NULL NULL 2913-07-17 15:06:58.041 -10206 NULL -0.200000000000000000 +NULL NULL NULL 2938-12-21 23:35:59.498 29362 ZMY 0.880000000000000000 +NULL NULL NULL 2957-05-07 10:41:46 20223 OWQT -586953.153681000000000000 +NULL NULL NULL 2960-04-12 07:03:42.000366651 20340 CYZYUNSF -96.300000000000000000 +NULL NULL NULL 2960-04-12 07:03:42.000366651 20340 CYZYUNSF 2.157765900000000000 +NULL NULL NULL 2969-01-23 14:08:04.000667259 -18138 VDPN 8924831210.427680190000000000 +NULL NULL NULL 2969-01-23 14:08:04.000667259 -32485 AGEPWWLJF -48431309405.652522000000000000 +NULL NULL NULL 2969-01-23 14:08:04.000667259 -8913 UIMQ -375994644577.315257000000000000 +NULL NULL NULL 2969-01-23 14:08:04.000667259 -8913 UIMQ -81.000000000000000000 +NULL NULL NULL 
2969-01-23 14:08:04.000667259 -8913 UIMQ 9.178000000000000000
+NULL NULL NULL 2969-01-23 14:08:04.000667259 14500 WXLTRFQP -23.819800000000000000
+NULL NULL NULL 2969-01-23 14:08:04.000667259 6689 TFGVOGPJF -0.010000000000000000
+NULL NULL NULL 2971-02-14 09:13:19 -16605 BVACIRP -27394351.300000000000000000
+NULL NULL NULL 2971-02-14 09:13:19 -16605 BVACIRP -5.751278023000000000
+NULL NULL NULL NULL -12914 ZVEUKC 221.000000000000000000
+NULL NULL NULL NULL NULL NULL -2.400000000000000000
+NULL NULL NULL NULL NULL NULL -2207.300000000000000000
+NULL NULL NULL NULL NULL NULL NULL
+PREHOOK: query: CREATE TABLE fullouter_string_big_1a_txt(key string)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_string_big_1a_txt
+POSTHOOK: query: CREATE TABLE fullouter_string_big_1a_txt(key string)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_string_big_1a_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_string_big_1a_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_string_big_1a_txt
+PREHOOK: query: CREATE TABLE fullouter_string_big_1a STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@fullouter_string_big_1a_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_string_big_1a
+POSTHOOK: query: CREATE TABLE fullouter_string_big_1a STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@fullouter_string_big_1a_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_string_big_1a
+POSTHOOK: Lineage: fullouter_string_big_1a.key SIMPLE [(fullouter_string_big_1a_txt)fullouter_string_big_1a_txt.FieldSchema(name:key, type:string, comment:null), ]
+PREHOOK: query: CREATE TABLE fullouter_string_big_1a_nonull_txt(key string)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_string_big_1a_nonull_txt
+POSTHOOK: query: CREATE TABLE fullouter_string_big_1a_nonull_txt(key string)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_string_big_1a_nonull_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_nonull_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_string_big_1a_nonull_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_nonull_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_string_big_1a_nonull_txt
+PREHOOK: query: CREATE TABLE fullouter_string_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_nonull_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@fullouter_string_big_1a_nonull_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_string_big_1a_nonull
+POSTHOOK: query: CREATE TABLE fullouter_string_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_nonull_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@fullouter_string_big_1a_nonull_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_string_big_1a_nonull
+POSTHOOK: Lineage: fullouter_string_big_1a_nonull.key SIMPLE [(fullouter_string_big_1a_nonull_txt)fullouter_string_big_1a_nonull_txt.FieldSchema(name:key, type:string, comment:null), ]
+PREHOOK: query: CREATE TABLE fullouter_string_small_1a_txt(key string, s_date date, s_timestamp timestamp)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_string_small_1a_txt
+POSTHOOK: query: CREATE TABLE fullouter_string_small_1a_txt(key string, s_date date, s_timestamp timestamp)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_string_small_1a_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_string_small_1a_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_string_small_1a_txt
+PREHOOK: query: CREATE TABLE fullouter_string_small_1a STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@fullouter_string_small_1a_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_string_small_1a
+POSTHOOK: query: CREATE TABLE fullouter_string_small_1a STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@fullouter_string_small_1a_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_string_small_1a
+POSTHOOK: Lineage: fullouter_string_small_1a.key SIMPLE [(fullouter_string_small_1a_txt)fullouter_string_small_1a_txt.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: fullouter_string_small_1a.s_date SIMPLE [(fullouter_string_small_1a_txt)fullouter_string_small_1a_txt.FieldSchema(name:s_date, type:date, comment:null), ]
+POSTHOOK: Lineage: fullouter_string_small_1a.s_timestamp SIMPLE [(fullouter_string_small_1a_txt)fullouter_string_small_1a_txt.FieldSchema(name:s_timestamp, type:timestamp, comment:null), ]
+PREHOOK: query: CREATE TABLE fullouter_string_small_1a_nonull_txt(key string, s_date date, s_timestamp timestamp)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_string_small_1a_nonull_txt
+POSTHOOK: query: CREATE TABLE fullouter_string_small_1a_nonull_txt(key string, s_date date, s_timestamp timestamp)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_string_small_1a_nonull_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_nonull_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_string_small_1a_nonull_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_nonull_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_string_small_1a_nonull_txt
+PREHOOK: query: CREATE TABLE fullouter_string_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_nonull_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@fullouter_string_small_1a_nonull_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_string_small_1a_nonull
+POSTHOOK: query: CREATE TABLE fullouter_string_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_nonull_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@fullouter_string_small_1a_nonull_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_string_small_1a_nonull
+POSTHOOK: Lineage: fullouter_string_small_1a_nonull.key SIMPLE [(fullouter_string_small_1a_nonull_txt)fullouter_string_small_1a_nonull_txt.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: fullouter_string_small_1a_nonull.s_date SIMPLE [(fullouter_string_small_1a_nonull_txt)fullouter_string_small_1a_nonull_txt.FieldSchema(name:s_date, type:date, comment:null), ]
+POSTHOOK: Lineage: fullouter_string_small_1a_nonull.s_timestamp SIMPLE [(fullouter_string_small_1a_nonull_txt)fullouter_string_small_1a_nonull_txt.FieldSchema(name:s_timestamp, type:timestamp, comment:null), ]
+PREHOOK: query: analyze table fullouter_string_big_1a compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_string_big_1a
+PREHOOK: Output: default@fullouter_string_big_1a
+POSTHOOK: query: analyze table fullouter_string_big_1a compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_string_big_1a
+POSTHOOK: Output: default@fullouter_string_big_1a
+PREHOOK: query: analyze table fullouter_string_big_1a compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_string_big_1a
+PREHOOK: Output: default@fullouter_string_big_1a
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_string_big_1a compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_string_big_1a
+POSTHOOK: Output: default@fullouter_string_big_1a
+#### A masked pattern was here ####
+PREHOOK: query: analyze table fullouter_string_big_1a_nonull compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_string_big_1a_nonull
+PREHOOK: Output: default@fullouter_string_big_1a_nonull
+POSTHOOK: query: analyze table fullouter_string_big_1a_nonull compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_string_big_1a_nonull
+POSTHOOK: Output: default@fullouter_string_big_1a_nonull
+PREHOOK: query: analyze table fullouter_string_big_1a_nonull compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_string_big_1a_nonull
+PREHOOK: Output: default@fullouter_string_big_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_string_big_1a_nonull compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_string_big_1a_nonull
+POSTHOOK: Output: default@fullouter_string_big_1a_nonull
+#### A masked pattern was here ####
+PREHOOK: query: analyze table fullouter_string_small_1a compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_string_small_1a
+PREHOOK: Output: default@fullouter_string_small_1a
+POSTHOOK: query: analyze table fullouter_string_small_1a compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_string_small_1a
+POSTHOOK: Output: default@fullouter_string_small_1a
+PREHOOK: query: analyze table fullouter_string_small_1a compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_string_small_1a
+PREHOOK: Output: default@fullouter_string_small_1a
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_string_small_1a compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_string_small_1a
+POSTHOOK: Output: default@fullouter_string_small_1a
+#### A masked pattern was here ####
+PREHOOK: query: analyze table fullouter_string_small_1a_nonull compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_string_small_1a_nonull
+PREHOOK: Output: default@fullouter_string_small_1a_nonull
+POSTHOOK: query: analyze table fullouter_string_small_1a_nonull compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_string_small_1a_nonull
+POSTHOOK: Output: default@fullouter_string_small_1a_nonull
+PREHOOK: query: analyze table fullouter_string_small_1a_nonull compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_string_small_1a_nonull
+PREHOOK: Output: default@fullouter_string_small_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_string_small_1a_nonull compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_string_small_1a_nonull
+POSTHOOK: Output: default@fullouter_string_small_1a_nonull
+#### A masked pattern was here ####
+PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
+SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key
+order by b.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL
+SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key
+order by b.key
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: b
+                  Statistics: Num rows: 13 Data size: 1056 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:ROW__ID:struct]
+                  Select Operator
+                    expressions: key (type: string)
+                    outputColumnNames: _col0
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
+                        projectedOutputColumnNums: [0]
+                    Statistics: Num rows: 13 Data size: 1056 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string)
+                      sort order: +
+                      Map-reduce partition columns: _col0 (type: string)
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkStringOperator
+                          keyColumns: 0:string
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                      Statistics: Num rows: 13 Data size: 1056 Basic stats: COMPLETE Column stats: NONE
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 1
+                    includeColumns: [0]
+                    dataColumns: key:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+        Map 2
+            Map Operator Tree:
+                TableScan
+                  alias: s
+                  Statistics: Num rows: 38 Data size: 6606 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:s_date:date, 2:s_timestamp:timestamp, 3:ROW__ID:struct]
+                  Select Operator
+                    expressions: key (type: string), s_date (type: date), s_timestamp (type: timestamp)
+                    outputColumnNames: _col0, _col1, _col2
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
+                        projectedOutputColumnNums: [0, 1, 2]
+                    Statistics: Num rows: 38 Data size: 6606 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string)
+                      sort order: +
+                      Map-reduce partition columns: _col0 (type: string)
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkStringOperator
+                          keyColumns: 0:string
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                          valueColumns: 1:date, 2:timestamp
+                      Statistics: Num rows: 38 Data size: 6606 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col1 (type: date), _col2 (type: timestamp)
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 3
+                    includeColumns: [0, 1, 2]
+                    dataColumns: key:string, s_date:date, s_timestamp:timestamp
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+        Reducer 3
+            Execution mode: vectorized, llap
+            Reduce Vectorization:
+                enabled: true
+                enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: a
+                reduceColumnSortOrder: +
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 3
+                    dataColumns: KEY.reducesinkkey0:string, VALUE._col0:date, VALUE._col1:timestamp
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: [string]
+            Reduce Operator Tree:
+              Map Join Operator
+                condition map:
+                     Full Outer Join 0 to 1
+                keys:
+                  0 KEY.reducesinkkey0 (type: string)
+                  1 KEY.reducesinkkey0 (type: string)
+                Map Join Vectorization:
+                    bigTableKeyColumns: 0:string
+                    bigTableRetainColumnNums: [0, 1, 2]
+                    bigTableValueColumns: 0:string, 1:date, 2:timestamp
+                    className: VectorMapJoinFullOuterStringOperator
+                    fullOuterSmallTableKeyMapping: 0 -> 3
+                    native: true
+                    nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true
+                    projectedOutput: 3:string, 0:string, 1:date, 2:timestamp
+                    hashTableImplementationType: OPTIMIZED
+                outputColumnNames: _col0, _col1, _col2, _col3
+                input vertices:
+                  0 Map 1
+                Statistics: Num rows: 41 Data size: 7266 Basic stats: COMPLETE Column stats: NONE
+                DynamicPartitionHashJoin: true
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Reduce Sink Vectorization:
+                      className: VectorReduceSinkObjectHashOperator
+                      keyColumns: 3:string
+                      native: true
+                      nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                      valueColumns: 0:string, 1:date, 2:timestamp
+                  Statistics: Num rows: 41 Data size: 7266 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: string), _col2 (type: date), _col3 (type: timestamp)
+        Reducer 4
+            Execution mode: vectorized, llap
+            Reduce Vectorization:
+                enabled: true
+                enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: z
+                reduceColumnSortOrder: +
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 4
+                    dataColumns: KEY.reducesinkkey0:string, VALUE._col0:string, VALUE._col1:date, VALUE._col2:timestamp
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string), VALUE._col1 (type: date), VALUE._col2 (type: timestamp)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Select Vectorization:
+                    className: VectorSelectOperator
+                    native: true
+                    projectedOutputColumnNums: [0, 1, 2, 3]
+                Statistics: Num rows: 41 Data size: 7266 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  File Sink Vectorization:
+                      className: VectorFileSinkOperator
+                      native: false
+                  Statistics: Num rows: 41 Data size: 7266 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key
+order by b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_string_big_1a
+PREHOOK: Input: default@fullouter_string_small_1a
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key
+order by b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_string_big_1a
+POSTHOOK: Input: default@fullouter_string_small_1a
+#### A masked pattern was here ####
+FTWURVH FTWURVH 1976-03-10 2683-11-22 13:07:04.66673556
+MXGDMBD MXGDMBD 1880-11-01 2765-10-06 13:28:17.000688592
+NULL 1985-01-22 2111-01-10 15:44:28
+NULL 2021-02-21 2802-04-21 18:48:18.5933838
+NULL AARNZRVZQ 2000-11-13 2309-06-05 19:54:13
+NULL AARNZRVZQ 2002-10-23 2525-05-12 15:59:35
+NULL ATZJTPECF 1829-10-16 2357-05-08 07:09:09.000482799
+NULL ATZJTPECF 2217-10-22 2808-10-20 16:01:24.558
+NULL BDBMW 2278-04-27 2101-02-21 08:53:34.692
+NULL BEP 2141-02-19 2521-06-09 01:20:07.121
+NULL BEP 2206-08-10 2331-10-09 10:59:51
+NULL CQMTQLI 2031-09-13 1927-02-13 08:39:25.000919094
+NULL CQMTQLI 2090-11-13 2693-03-17 16:19:55.82
+NULL FROPIK 2023-02-28 2467-05-11 06:04:13.426693647
+NULL FROPIK 2124-10-01 2974-07-06 12:05:08.000146048
+NULL FROPIK 2214-02-09 1949-08-18 17:14:38.000703738
+NULL FYW 1807-03-20 2305-08-17 01:32:44
+NULL GOYJHW 1959-04-27 NULL
+NULL GOYJHW 1976-03-06 2805-07-10 10:51:57.00083302
+NULL GOYJHW 1993-04-07 1950-05-04 09:28:22.000114784
+NULL GSJPSIYOU 1948-07-17 2006-09-24 16:01:24.000239251
+NULL IOQIDQBHU 2198-02-08 2073-03-21 15:32:57.617920888
+NULL IWEZJHKE NULL NULL
+NULL KL 1980-09-22 2073-08-25 11:51:10.318
+NULL LOTLS 1957-11-09 2092-06-07 06:42:30.000538454
+NULL LOTLS 2099-08-04 2181-01-25 01:04:25.000030055
+NULL LOTLS 2126-09-16 1977-12-15 15:28:56
+NULL NADANUQMW 2037-10-19 2320-04-26 18:50:25.000426922
+NULL NULL 1865-11-08 2893-04-07 07:36:12
+NULL NULL 1915-02-22 2554-10-27 09:34:30
+NULL NULL 2250-04-22 2548-03-21 08:23:13.133573801
+NULL NULL NULL NULL
+NULL QTSRKSKB 2144-01-13 2627-12-20 03:38:53.000389266
+NULL SDA 2196-04-12 2462-10-26 19:28:12.733
+NULL VNRXWQ 1883-02-06 2287-07-17 16:46:58.287
+NULL VNRXWQ 2276-11-16 2072-08-16 17:45:47.48349887
+NULL WNGFTTY 1843-06-10 2411-01-28 20:03:59
+NULL WNGFTTY 2251-08-16 2649-12-21 18:30:42.498
+NULL ZNOUDCR NULL 1988-04-23 08:40:21
+PXLD NULL NULL NULL
+PXLD NULL NULL NULL
+PXLD NULL NULL NULL
+QNCYBDW NULL NULL NULL
+UA NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+PREHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key
+order by b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_string_big_1a_nonull
+PREHOOK: Input: default@fullouter_string_small_1a
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key
+order by b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_string_big_1a_nonull
+POSTHOOK: Input: default@fullouter_string_small_1a
+#### A masked pattern was here ####
+FTWURVH FTWURVH 1976-03-10 2683-11-22 13:07:04.66673556
+MXGDMBD MXGDMBD 1880-11-01 2765-10-06 13:28:17.000688592
+NULL 1985-01-22 2111-01-10 15:44:28
+NULL 2021-02-21 2802-04-21 18:48:18.5933838
+NULL AARNZRVZQ 2000-11-13 2309-06-05 19:54:13
+NULL AARNZRVZQ 2002-10-23 2525-05-12 15:59:35
+NULL ATZJTPECF 1829-10-16 2357-05-08 07:09:09.000482799
+NULL ATZJTPECF 2217-10-22 2808-10-20 16:01:24.558
+NULL BDBMW 2278-04-27 2101-02-21 08:53:34.692
+NULL BEP 2141-02-19 2521-06-09 01:20:07.121
+NULL BEP 2206-08-10 2331-10-09 10:59:51
+NULL CQMTQLI 2031-09-13 1927-02-13 08:39:25.000919094
+NULL CQMTQLI 2090-11-13 2693-03-17 16:19:55.82
+NULL FROPIK 2023-02-28 2467-05-11 06:04:13.426693647
+NULL FROPIK 2124-10-01 2974-07-06 12:05:08.000146048
+NULL FROPIK 2214-02-09 1949-08-18 17:14:38.000703738
+NULL FYW 1807-03-20 2305-08-17 01:32:44
+NULL GOYJHW 1959-04-27 NULL
+NULL GOYJHW 1976-03-06 2805-07-10 10:51:57.00083302
+NULL GOYJHW 1993-04-07 1950-05-04 09:28:22.000114784
+NULL GSJPSIYOU 1948-07-17 2006-09-24 16:01:24.000239251
+NULL IOQIDQBHU 2198-02-08 2073-03-21 15:32:57.617920888
+NULL IWEZJHKE NULL NULL
+NULL KL 1980-09-22 2073-08-25 11:51:10.318
+NULL LOTLS 1957-11-09 2092-06-07 06:42:30.000538454
+NULL LOTLS 2099-08-04 2181-01-25 01:04:25.000030055
+NULL LOTLS 2126-09-16 1977-12-15 15:28:56
+NULL NADANUQMW 2037-10-19 2320-04-26 18:50:25.000426922
+NULL NULL 1865-11-08 2893-04-07 07:36:12
+NULL NULL 1915-02-22 2554-10-27 09:34:30
+NULL NULL 2250-04-22 2548-03-21 08:23:13.133573801
+NULL QTSRKSKB 2144-01-13 2627-12-20 03:38:53.000389266
+NULL SDA 2196-04-12 2462-10-26 19:28:12.733
+NULL VNRXWQ 1883-02-06 2287-07-17 16:46:58.287
+NULL VNRXWQ 2276-11-16 2072-08-16 17:45:47.48349887
+NULL WNGFTTY 1843-06-10 2411-01-28 20:03:59
+NULL WNGFTTY 2251-08-16 2649-12-21 18:30:42.498
+NULL ZNOUDCR NULL 1988-04-23 08:40:21
+PXLD NULL NULL NULL
+PXLD NULL NULL NULL
+PXLD NULL NULL NULL
+QNCYBDW NULL NULL NULL
+UA NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+PREHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key
+order by b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_string_big_1a
+PREHOOK: Input: default@fullouter_string_small_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key
+order by b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_string_big_1a
+POSTHOOK: Input: default@fullouter_string_small_1a_nonull
+#### A masked pattern was here ####
+FTWURVH FTWURVH 1976-03-10 2683-11-22 13:07:04.66673556
+MXGDMBD MXGDMBD 1880-11-01 2765-10-06 13:28:17.000688592
+NULL 1985-01-22 2111-01-10 15:44:28
+NULL 2021-02-21 2802-04-21 18:48:18.5933838
+NULL AARNZRVZQ 2000-11-13 2309-06-05 19:54:13
+NULL AARNZRVZQ 2002-10-23 2525-05-12 15:59:35
+NULL ATZJTPECF 1829-10-16 2357-05-08 07:09:09.000482799
+NULL ATZJTPECF 2217-10-22 2808-10-20 16:01:24.558
+NULL BDBMW 2278-04-27 2101-02-21 08:53:34.692
+NULL BEP 2141-02-19 2521-06-09 01:20:07.121
+NULL BEP 2206-08-10 2331-10-09 10:59:51
+NULL CQMTQLI 2031-09-13 1927-02-13 08:39:25.000919094
+NULL CQMTQLI 2090-11-13 2693-03-17 16:19:55.82
+NULL FROPIK 2023-02-28 2467-05-11 06:04:13.426693647
+NULL FROPIK 2124-10-01 2974-07-06 12:05:08.000146048
+NULL FROPIK 2214-02-09 1949-08-18 17:14:38.000703738
+NULL FYW 1807-03-20 2305-08-17 01:32:44
+NULL GOYJHW 1959-04-27 NULL
+NULL GOYJHW 1976-03-06 2805-07-10 10:51:57.00083302
+NULL GOYJHW 1993-04-07 1950-05-04 09:28:22.000114784
+NULL GSJPSIYOU 1948-07-17 2006-09-24 16:01:24.000239251
+NULL IOQIDQBHU 2198-02-08 2073-03-21 15:32:57.617920888
+NULL IWEZJHKE NULL NULL
+NULL KL 1980-09-22 2073-08-25 11:51:10.318
+NULL LOTLS 1957-11-09 2092-06-07 06:42:30.000538454
+NULL LOTLS 2099-08-04 2181-01-25 01:04:25.000030055
+NULL LOTLS 2126-09-16 1977-12-15 15:28:56
+NULL NADANUQMW 2037-10-19 2320-04-26 18:50:25.000426922
+NULL NULL NULL NULL
+NULL QTSRKSKB 2144-01-13 2627-12-20 03:38:53.000389266
+NULL SDA 2196-04-12 2462-10-26 19:28:12.733
+NULL VNRXWQ 1883-02-06 2287-07-17 16:46:58.287
+NULL VNRXWQ 2276-11-16 2072-08-16 17:45:47.48349887
+NULL WNGFTTY 1843-06-10 2411-01-28 20:03:59
+NULL WNGFTTY 2251-08-16 2649-12-21 18:30:42.498
+NULL ZNOUDCR NULL 1988-04-23 08:40:21
+PXLD NULL NULL NULL
+PXLD NULL NULL NULL
+PXLD NULL NULL NULL
+QNCYBDW NULL NULL NULL
+UA NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+PREHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key
+order by b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_string_big_1a_nonull
+PREHOOK: Input: default@fullouter_string_small_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key
+order by b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_string_big_1a_nonull
+POSTHOOK: Input: default@fullouter_string_small_1a_nonull
+#### A masked pattern was here ####
+FTWURVH FTWURVH 1976-03-10 2683-11-22 13:07:04.66673556
+MXGDMBD MXGDMBD 1880-11-01 2765-10-06 13:28:17.000688592
+NULL 1985-01-22 2111-01-10 15:44:28
+NULL 2021-02-21 2802-04-21 18:48:18.5933838
+NULL AARNZRVZQ 2000-11-13 2309-06-05 19:54:13
+NULL AARNZRVZQ 2002-10-23 2525-05-12 15:59:35
+NULL ATZJTPECF 1829-10-16 2357-05-08 07:09:09.000482799
+NULL ATZJTPECF 2217-10-22 2808-10-20 16:01:24.558
+NULL BDBMW 2278-04-27 2101-02-21 08:53:34.692
+NULL BEP 2141-02-19 2521-06-09 01:20:07.121
+NULL BEP 2206-08-10 2331-10-09 10:59:51
+NULL CQMTQLI 2031-09-13 1927-02-13 08:39:25.000919094
+NULL CQMTQLI 2090-11-13 2693-03-17 16:19:55.82
+NULL FROPIK 2023-02-28 2467-05-11 06:04:13.426693647
+NULL FROPIK 2124-10-01 2974-07-06 12:05:08.000146048
+NULL FROPIK 2214-02-09 1949-08-18 17:14:38.000703738
+NULL FYW 1807-03-20 2305-08-17 01:32:44
+NULL GOYJHW 1959-04-27 NULL
+NULL GOYJHW 1976-03-06 2805-07-10 10:51:57.00083302
+NULL GOYJHW 1993-04-07 1950-05-04 09:28:22.000114784
+NULL GSJPSIYOU 1948-07-17 2006-09-24 16:01:24.000239251
+NULL IOQIDQBHU 2198-02-08 2073-03-21 15:32:57.617920888
+NULL IWEZJHKE NULL NULL
+NULL KL 1980-09-22 2073-08-25 11:51:10.318
+NULL LOTLS 1957-11-09 2092-06-07 06:42:30.000538454
+NULL LOTLS 2099-08-04 2181-01-25 01:04:25.000030055
+NULL LOTLS 2126-09-16 1977-12-15 15:28:56
+NULL NADANUQMW 2037-10-19 2320-04-26 18:50:25.000426922
+NULL QTSRKSKB 2144-01-13 2627-12-20 03:38:53.000389266
+NULL SDA 2196-04-12 2462-10-26 19:28:12.733
+NULL VNRXWQ 1883-02-06 2287-07-17 16:46:58.287
+NULL VNRXWQ 2276-11-16 2072-08-16 17:45:47.48349887
+NULL WNGFTTY 1843-06-10 2411-01-28 20:03:59
+NULL WNGFTTY 2251-08-16 2649-12-21 18:30:42.498
+NULL ZNOUDCR NULL 1988-04-23 08:40:21
+PXLD NULL NULL NULL
+PXLD NULL NULL NULL
+PXLD NULL NULL NULL
+QNCYBDW NULL NULL NULL
+UA NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
+WXHJ NULL NULL NULL
diff --git ql/src/test/results/clientpositive/llap/vector_fullouter_mapjoin_1_optimized_passthru.q.out ql/src/test/results/clientpositive/llap/vector_fullouter_mapjoin_1_optimized_passthru.q.out
new file mode 100644
index 0000000000..52ca0fbf6c
--- /dev/null
+++ ql/src/test/results/clientpositive/llap/vector_fullouter_mapjoin_1_optimized_passthru.q.out
@@ -0,0 +1,3923 @@
+PREHOOK: query: CREATE TABLE fullouter_long_big_1a_txt(key bigint)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_long_big_1a_txt
+POSTHOOK: query: CREATE TABLE fullouter_long_big_1a_txt(key bigint)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_long_big_1a_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_long_big_1a_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_long_big_1a_txt
+PREHOOK: query: CREATE TABLE fullouter_long_big_1a STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@fullouter_long_big_1a_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_long_big_1a
+POSTHOOK: query: CREATE TABLE fullouter_long_big_1a STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@fullouter_long_big_1a_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_long_big_1a
+POSTHOOK: Lineage: fullouter_long_big_1a.key SIMPLE [(fullouter_long_big_1a_txt)fullouter_long_big_1a_txt.FieldSchema(name:key, type:bigint, comment:null), ]
+PREHOOK: query: CREATE TABLE fullouter_long_big_1a_nonull_txt(key bigint)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_long_big_1a_nonull_txt
+POSTHOOK: query: CREATE TABLE fullouter_long_big_1a_nonull_txt(key bigint)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_long_big_1a_nonull_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_nonull_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_long_big_1a_nonull_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_big_1a_nonull_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_long_big_1a_nonull_txt
+PREHOOK: query: CREATE TABLE fullouter_long_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_nonull_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@fullouter_long_big_1a_nonull_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_long_big_1a_nonull
+POSTHOOK: query: CREATE TABLE fullouter_long_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_big_1a_nonull_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@fullouter_long_big_1a_nonull_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_long_big_1a_nonull
+POSTHOOK: Lineage: fullouter_long_big_1a_nonull.key SIMPLE [(fullouter_long_big_1a_nonull_txt)fullouter_long_big_1a_nonull_txt.FieldSchema(name:key, type:bigint, comment:null), ]
+PREHOOK: query: CREATE TABLE fullouter_long_small_1a_txt(key bigint, s_date date)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_long_small_1a_txt
+POSTHOOK: query: CREATE TABLE fullouter_long_small_1a_txt(key bigint, s_date date)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_long_small_1a_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_long_small_1a_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_long_small_1a_txt
+PREHOOK: query: CREATE TABLE fullouter_long_small_1a STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@fullouter_long_small_1a_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_long_small_1a
+POSTHOOK: query: CREATE TABLE fullouter_long_small_1a STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@fullouter_long_small_1a_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_long_small_1a
+POSTHOOK: Lineage: fullouter_long_small_1a.key SIMPLE [(fullouter_long_small_1a_txt)fullouter_long_small_1a_txt.FieldSchema(name:key, type:bigint, comment:null), ]
+POSTHOOK: Lineage: fullouter_long_small_1a.s_date SIMPLE [(fullouter_long_small_1a_txt)fullouter_long_small_1a_txt.FieldSchema(name:s_date, type:date, comment:null), ]
+PREHOOK: query: CREATE TABLE fullouter_long_small_1a_nonull_txt(key bigint, s_date date)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_long_small_1a_nonull_txt
+POSTHOOK: query: CREATE TABLE fullouter_long_small_1a_nonull_txt(key bigint, s_date date)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_long_small_1a_nonull_txt
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_nonull_txt
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_long_small_1a_nonull_txt
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_long_small_1a_nonull_txt
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_long_small_1a_nonull_txt
+PREHOOK: query: CREATE TABLE fullouter_long_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_nonull_txt
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@fullouter_long_small_1a_nonull_txt
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_long_small_1a_nonull
+POSTHOOK: query: CREATE TABLE fullouter_long_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_long_small_1a_nonull_txt
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@fullouter_long_small_1a_nonull_txt
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_long_small_1a_nonull
+POSTHOOK: Lineage: fullouter_long_small_1a_nonull.key SIMPLE [(fullouter_long_small_1a_nonull_txt)fullouter_long_small_1a_nonull_txt.FieldSchema(name:key, type:bigint, comment:null), ]
+POSTHOOK: Lineage: fullouter_long_small_1a_nonull.s_date SIMPLE [(fullouter_long_small_1a_nonull_txt)fullouter_long_small_1a_nonull_txt.FieldSchema(name:s_date, type:date, comment:null), ]
+PREHOOK: query: analyze table fullouter_long_big_1a compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_long_big_1a
+PREHOOK: Output: default@fullouter_long_big_1a
+POSTHOOK: query: analyze table fullouter_long_big_1a compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_long_big_1a
+POSTHOOK: Output: default@fullouter_long_big_1a
+PREHOOK: query: analyze table fullouter_long_big_1a compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_long_big_1a
+PREHOOK: Output: default@fullouter_long_big_1a
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_long_big_1a compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_long_big_1a
+POSTHOOK: Output: default@fullouter_long_big_1a
+#### A masked pattern was here ####
+PREHOOK: query: analyze table fullouter_long_big_1a_nonull compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_long_big_1a_nonull
+PREHOOK: Output: default@fullouter_long_big_1a_nonull
+POSTHOOK: query: analyze table fullouter_long_big_1a_nonull compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_long_big_1a_nonull
+POSTHOOK: Output: default@fullouter_long_big_1a_nonull
+PREHOOK: query: analyze table fullouter_long_big_1a_nonull compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_long_big_1a_nonull
+PREHOOK: Output: default@fullouter_long_big_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_long_big_1a_nonull compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_long_big_1a_nonull
+POSTHOOK: Output: default@fullouter_long_big_1a_nonull
+#### A masked pattern was here ####
+PREHOOK: query: analyze table fullouter_long_small_1a compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_long_small_1a
+PREHOOK: Output: default@fullouter_long_small_1a
+POSTHOOK: query: analyze table fullouter_long_small_1a compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_long_small_1a
+POSTHOOK: Output: default@fullouter_long_small_1a
+PREHOOK: query: analyze table fullouter_long_small_1a compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_long_small_1a
+PREHOOK: Output: default@fullouter_long_small_1a
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_long_small_1a compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_long_small_1a
+POSTHOOK: Output: default@fullouter_long_small_1a
+#### A masked pattern was here ####
+PREHOOK: query: analyze table fullouter_long_small_1a_nonull compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_long_small_1a_nonull
+PREHOOK: Output: default@fullouter_long_small_1a_nonull
+POSTHOOK: query: analyze table fullouter_long_small_1a_nonull compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_long_small_1a_nonull
+POSTHOOK: Output: default@fullouter_long_small_1a_nonull
+PREHOOK: query: analyze table fullouter_long_small_1a_nonull compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_long_small_1a_nonull
+PREHOOK: Output: default@fullouter_long_small_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_long_small_1a_nonull compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_long_small_1a_nonull
+POSTHOOK: Output: default@fullouter_long_small_1a_nonull
+#### A masked pattern was here ####
+PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
+SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key
+order by b.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL
+SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key
+order by b.key
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: b
+                  Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                      vectorizationSchemaColumns: [0:key:bigint, 1:ROW__ID:struct]
+                  Select Operator
+                    expressions: key (type: bigint)
+                    outputColumnNames: _col0
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
+                        projectedOutputColumnNums: [0]
+                    Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: bigint)
+                      sort order: +
+                      Map-reduce partition columns: _col0 (type: bigint)
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkLongOperator
+                          keyColumns: 0:bigint
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                      Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 1
+                    includeColumns: [0]
+                    dataColumns: key:bigint
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+        Map 4
+            Map Operator Tree:
+                TableScan
+                  alias: s
+                  Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                      vectorizationSchemaColumns: [0:key:bigint, 1:s_date:date, 2:ROW__ID:struct]
+                  Select Operator
+                    expressions: key (type: bigint), s_date (type: date)
+                    outputColumnNames: _col0, _col1
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
+                        projectedOutputColumnNums: [0, 1]
+                    Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: bigint)
+                      sort order: +
+                      Map-reduce partition columns: _col0 (type: bigint)
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkLongOperator
+                          keyColumns: 0:bigint
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                          valueColumns: 1:date
+                      Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col1 (type: date)
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0, 1]
+                    dataColumns: key:bigint, s_date:date
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+        Reducer 2
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Full Outer Join 0 to 1
+                keys:
+                  0 _col0 (type: bigint)
+                  1 _col0 (type: bigint)
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: bigint)
+                  sort order: +
+                  Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: bigint), _col2 (type: date)
+            MergeJoin Vectorization:
+                enabled: false
+                enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
+        Reducer 3
+            Execution mode: vectorized, llap
+            Reduce Vectorization:
+                enabled: true
+                enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: z
+                reduceColumnSortOrder: +
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 3
+                    dataColumns: KEY.reducesinkkey0:bigint, VALUE._col0:bigint, VALUE._col1:date
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: bigint), VALUE._col1 (type: date)
+                outputColumnNames: _col0, _col1, _col2
+                Select Vectorization:
+                    className: VectorSelectOperator
+                    native: true
+                    projectedOutputColumnNums: [0, 1, 2]
+                Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  File Sink Vectorization:
+                      className: VectorFileSinkOperator
+                      native: false
+                  Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key
+order by b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_long_big_1a
+PREHOOK: Input: default@fullouter_long_small_1a
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key
+order by b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_long_big_1a
+POSTHOOK: Input: default@fullouter_long_small_1a
+#### A masked pattern was here ####
+-5206670856103795573 NULL NULL
+-5310365297525168078 NULL NULL
+-6187919478609154811 NULL NULL
+-6187919478609154811 NULL NULL
+-6187919478609154811 NULL NULL
+-6187919478609154811 NULL NULL
+-8460550397108077433 NULL NULL
+1569543799237464101 NULL NULL
+3313583664488247651 NULL NULL
+968819023021777205 NULL NULL
+NULL -1339636982994067311 2000-06-20
+NULL -1339636982994067311 2008-12-03
+NULL -2098090254092150988 1817-03-12
+NULL -2098090254092150988 2163-05-26
+NULL -2098090254092150988 2219-12-23
+NULL -2184423060953067642 1853-07-06
+NULL -2184423060953067642 1880-10-06
+NULL -2575185053386712613 1809-07-12
+NULL -2575185053386712613 2105-01-21
+NULL -2688622006344936758 1948-10-15
+NULL -2688622006344936758 2129-01-11
+NULL -327698348664467755 2222-10-15
+NULL -3655445881497026796 2108-08-16
+NULL -4224290881682877258 1813-05-17
+NULL -4224290881682877258 2120-01-16
+NULL -4224290881682877258 2185-07-08
+NULL -4961171400048338491 2196-08-10
+NULL -5706981533666803767 1800-09-20
+NULL -5706981533666803767 2151-06-09
+NULL -5754527700632192146 1958-07-15
+NULL -614848861623872247 2101-05-25
+NULL -614848861623872247 2112-11-09
+NULL -6784441713807772877 1845-02-16
+NULL -6784441713807772877 2054-06-17
+NULL -7707546703881534780 2134-08-20
+NULL 214451696109242839 1855-05-12
+NULL 214451696109242839 1977-01-04
+NULL 214451696109242839 2179-04-18
+NULL 2438535236662373438 1881-09-16
+NULL 2438535236662373438 1916-01-10
+NULL 2438535236662373438 2026-06-23
+NULL 3845554233155411208 1805-11-10
+NULL 3845554233155411208 2264-04-05
+NULL 3873405809071478736 1918-11-20
+NULL 3873405809071478736 2034-06-09
+NULL 3873405809071478736 2164-04-23
+NULL 3905351789241845882 1866-07-28
+NULL 3905351789241845882 2045-12-05
+NULL 434940853096155515 2275-02-08
+NULL 4436884039838843341 2031-05-23
+NULL 5246983111579595707 1817-07-01
+NULL 5246983111579595707 2260-05-11
+NULL 5252407779338300447 2039-03-10
+NULL 5252407779338300447 2042-04-26
+NULL 6049335087268933751 2086-12-17
+NULL 6049335087268933751 2282-06-09
+NULL 7297177530102477725 1921-05-11
+NULL 7297177530102477725 1926-04-12
+NULL 7297177530102477725 2125-08-26
+NULL 7937120928560087303 2083-03-14
+NULL 8755921538765428593 1827-05-01
+NULL NULL 2024-01-23
+NULL NULL 2098-02-10
+NULL NULL 2242-02-08
+NULL NULL NULL
+PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
+SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key
+order by b.key
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL
+SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key
+order by b.key
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE)
+        Reducer 4 <- Reducer 3 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: b
+                  Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                      vectorizationSchemaColumns: [0:key:bigint, 1:ROW__ID:struct]
+                  Select Operator
+                    expressions: key (type: bigint)
+                    outputColumnNames: _col0
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
+                        projectedOutputColumnNums: [0]
+                    Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: bigint)
+                      sort order: +
+                      Map-reduce partition columns: _col0 (type: bigint)
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkLongOperator
+                          keyColumns: 0:bigint
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                      Statistics: Num rows: 11 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 1
+                    includeColumns: [0]
+                    dataColumns: key:bigint
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+        Map 2
+            Map Operator Tree:
+                TableScan
+                  alias: s
+                  Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                      vectorizationSchemaColumns: [0:key:bigint, 1:s_date:date, 2:ROW__ID:struct]
+                  Select Operator
+                    expressions: key (type: bigint), s_date (type: date)
+                    outputColumnNames: _col0, _col1
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
+                        projectedOutputColumnNums: [0, 1]
+                    Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: bigint)
+                      sort order: +
+                      Map-reduce partition columns: _col0 (type: bigint)
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkLongOperator
+                          keyColumns: 0:bigint
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                          valueColumns: 1:date
+                      Statistics: Num rows: 54 Data size: 3432 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col1 (type: date)
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0, 1]
+                    dataColumns: key:bigint, s_date:date
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+        Reducer 3
+            Execution mode: vectorized, llap
+            Reduce Vectorization:
+                enabled: true
+                enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: a
+                reduceColumnSortOrder: +
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    dataColumns: KEY.reducesinkkey0:bigint, VALUE._col0:date
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: [bigint]
+            Reduce Operator Tree:
+              Map Join Operator
+                condition map:
+                     Full Outer Join 0 to 1
+                keys:
+                  0 KEY.reducesinkkey0 (type: bigint)
+                  1 KEY.reducesinkkey0 (type: bigint)
+                Map Join Vectorization:
+                    bigTableKeyExpressions: col 0:bigint
+                    bigTableValueExpressions: col 0:bigint, col 1:date
+                    className: VectorMapJoinOperator
+                    native: false
+                    nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true
+                    nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false
+                outputColumnNames: _col0, _col1, _col2
+                input vertices:
+                  0 Map 1
+                Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE
+                DynamicPartitionHashJoin: true
+                Reduce Output Operator
+                  key expressions: _col0 (type: bigint)
+                  sort order: +
+                  Reduce Sink Vectorization:
+                      className: VectorReduceSinkObjectHashOperator
+                      keyColumns: 0:bigint
+                      native: true
+                      nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                      valueColumns: 1:bigint, 2:date
+                  Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: bigint), _col2 (type: date)
+        Reducer 4
+            Execution mode: vectorized, llap
+            Reduce Vectorization:
+                enabled: true
+                enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: z
+                reduceColumnSortOrder: +
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 3
+                    dataColumns: KEY.reducesinkkey0:bigint, VALUE._col0:bigint, VALUE._col1:date
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: bigint), VALUE._col1 (type: date)
+                outputColumnNames: _col0, _col1, _col2
+                Select Vectorization:
+                    className: VectorSelectOperator
+                    native: true
+                    projectedOutputColumnNums: [0, 1, 2]
+                Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  File Sink Vectorization:
+                      className: VectorFileSinkOperator
+                      native: false
+                  Statistics: Num rows: 59 Data size: 3775 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key
+order by b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_long_big_1a
+PREHOOK: Input: default@fullouter_long_small_1a
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key
+order by b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_long_big_1a
+POSTHOOK: Input: default@fullouter_long_small_1a
+#### A masked pattern was here ####
+-5206670856103795573 NULL NULL
+-5310365297525168078 NULL NULL
+-6187919478609154811 NULL NULL
+-6187919478609154811 NULL NULL
+-6187919478609154811 NULL NULL
+-6187919478609154811 NULL NULL
+-8460550397108077433 NULL NULL
+1569543799237464101 NULL NULL
+3313583664488247651 NULL NULL
+968819023021777205 NULL NULL
+NULL -1339636982994067311 2000-06-20
+NULL -1339636982994067311 2008-12-03
+NULL -2098090254092150988 1817-03-12
+NULL -2098090254092150988 2163-05-26
+NULL -2098090254092150988 2219-12-23
+NULL -2184423060953067642 1853-07-06
+NULL -2184423060953067642 1880-10-06
+NULL -2575185053386712613 1809-07-12
+NULL -2575185053386712613 2105-01-21
+NULL -2688622006344936758 1948-10-15
+NULL -2688622006344936758 2129-01-11
+NULL -327698348664467755 2222-10-15
+NULL -3655445881497026796 2108-08-16
+NULL -4224290881682877258 1813-05-17
+NULL -4224290881682877258 2120-01-16
+NULL -4224290881682877258 2185-07-08
+NULL -4961171400048338491 2196-08-10
+NULL -5706981533666803767 1800-09-20
+NULL -5706981533666803767 2151-06-09
+NULL -5754527700632192146 1958-07-15
+NULL -614848861623872247 2101-05-25
+NULL -614848861623872247 2112-11-09
+NULL -6784441713807772877 1845-02-16
+NULL -6784441713807772877 2054-06-17
+NULL -7707546703881534780 2134-08-20
+NULL 214451696109242839 1855-05-12
+NULL 214451696109242839 1977-01-04
+NULL 214451696109242839 2179-04-18
+NULL 2438535236662373438 1881-09-16
+NULL 2438535236662373438 1916-01-10
+NULL 2438535236662373438 2026-06-23
+NULL 3845554233155411208 1805-11-10
+NULL 3845554233155411208 2264-04-05
+NULL 3873405809071478736 1918-11-20
+NULL 3873405809071478736 2034-06-09
+NULL 3873405809071478736 2164-04-23
+NULL 3905351789241845882 1866-07-28
+NULL 3905351789241845882 2045-12-05
+NULL 434940853096155515 2275-02-08
+NULL 4436884039838843341 2031-05-23
+NULL 5246983111579595707 1817-07-01
+NULL 5246983111579595707 2260-05-11
+NULL 5252407779338300447 2039-03-10
+NULL 5252407779338300447 2042-04-26
+NULL 6049335087268933751 2086-12-17
+NULL 6049335087268933751 2282-06-09
+NULL 7297177530102477725 1921-05-11
+NULL 7297177530102477725 1926-04-12
+NULL 7297177530102477725 2125-08-26
+NULL 7937120928560087303 2083-03-14
+NULL 8755921538765428593 1827-05-01
+NULL NULL 2024-01-23
+NULL NULL 2098-02-10
+NULL NULL 2242-02-08
+NULL NULL NULL
+PREHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key
+order by b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_long_big_1a_nonull
+PREHOOK: Input: default@fullouter_long_small_1a
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a s ON b.key = s.key
+order by b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_long_big_1a_nonull
+POSTHOOK: Input: default@fullouter_long_small_1a
+#### A masked pattern was here ####
+-5206670856103795573 NULL NULL
+-5310365297525168078 NULL NULL
+-6187919478609154811 NULL NULL
+-6187919478609154811 NULL NULL
+-6187919478609154811 NULL NULL
+-6187919478609154811 NULL NULL
+-8460550397108077433 NULL NULL
+1569543799237464101 NULL NULL
+3313583664488247651 NULL NULL
+968819023021777205 NULL NULL
+NULL -1339636982994067311 2000-06-20
+NULL -1339636982994067311 2008-12-03
+NULL -2098090254092150988 1817-03-12
+NULL -2098090254092150988 2163-05-26
+NULL -2098090254092150988 2219-12-23
+NULL -2184423060953067642 1853-07-06
+NULL -2184423060953067642 1880-10-06
+NULL -2575185053386712613 1809-07-12
+NULL -2575185053386712613 2105-01-21
+NULL -2688622006344936758 1948-10-15
+NULL -2688622006344936758 2129-01-11
+NULL -327698348664467755 2222-10-15
+NULL -3655445881497026796 2108-08-16
+NULL -4224290881682877258 1813-05-17
+NULL -4224290881682877258 2120-01-16
+NULL -4224290881682877258 2185-07-08
+NULL -4961171400048338491 2196-08-10
+NULL -5706981533666803767 1800-09-20
+NULL -5706981533666803767 2151-06-09
+NULL -5754527700632192146 1958-07-15
+NULL -614848861623872247 2101-05-25
+NULL -614848861623872247 2112-11-09
+NULL -6784441713807772877 1845-02-16
+NULL -6784441713807772877 2054-06-17
+NULL -7707546703881534780 2134-08-20
+NULL 214451696109242839 1855-05-12
+NULL 214451696109242839 1977-01-04
+NULL 214451696109242839 2179-04-18
+NULL 2438535236662373438 1881-09-16
+NULL 2438535236662373438 1916-01-10
+NULL 2438535236662373438 2026-06-23
+NULL 3845554233155411208 1805-11-10
+NULL 3845554233155411208 2264-04-05
+NULL 3873405809071478736 1918-11-20
+NULL 3873405809071478736 2034-06-09
+NULL 3873405809071478736 2164-04-23
+NULL 3905351789241845882 1866-07-28
+NULL 3905351789241845882 2045-12-05
+NULL 434940853096155515 2275-02-08
+NULL 4436884039838843341 2031-05-23
+NULL 5246983111579595707 1817-07-01
+NULL 5246983111579595707 2260-05-11
+NULL 5252407779338300447 2039-03-10
+NULL 5252407779338300447 2042-04-26
+NULL 6049335087268933751 2086-12-17
+NULL 6049335087268933751 2282-06-09
+NULL 7297177530102477725 1921-05-11
+NULL 7297177530102477725 1926-04-12
+NULL 7297177530102477725 2125-08-26
+NULL 7937120928560087303 2083-03-14
+NULL 8755921538765428593 1827-05-01
+NULL NULL 2024-01-23
+NULL NULL 2098-02-10
+NULL NULL 2242-02-08
+PREHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key
+order by b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_long_big_1a
+PREHOOK: Input: default@fullouter_long_small_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key
+order by b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_long_big_1a
+POSTHOOK: Input: default@fullouter_long_small_1a_nonull
+#### A masked pattern was here ####
+-5206670856103795573 NULL NULL
+-5310365297525168078 NULL NULL
+-6187919478609154811 NULL NULL
+-6187919478609154811 NULL NULL
+-6187919478609154811 NULL NULL
+-6187919478609154811 NULL NULL
+-8460550397108077433 NULL NULL
+1569543799237464101 NULL NULL
+3313583664488247651 NULL NULL
+968819023021777205 NULL NULL
+NULL -1339636982994067311 2000-06-20
+NULL -1339636982994067311 2008-12-03
+NULL -2098090254092150988 1817-03-12
+NULL -2098090254092150988 2163-05-26
+NULL -2098090254092150988 2219-12-23
+NULL -2184423060953067642 1853-07-06
+NULL -2184423060953067642 1880-10-06
+NULL -2575185053386712613 1809-07-12
+NULL -2575185053386712613 2105-01-21
+NULL -2688622006344936758 1948-10-15
+NULL -2688622006344936758 2129-01-11
+NULL -327698348664467755 2222-10-15
+NULL -3655445881497026796 2108-08-16
+NULL -4224290881682877258 1813-05-17
+NULL -4224290881682877258 2120-01-16
+NULL -4224290881682877258 2185-07-08
+NULL -4961171400048338491 2196-08-10
+NULL -5706981533666803767 1800-09-20
+NULL -5706981533666803767 2151-06-09
+NULL -5754527700632192146 1958-07-15
+NULL -614848861623872247 2101-05-25
+NULL -614848861623872247 2112-11-09
+NULL -6784441713807772877 1845-02-16
+NULL -6784441713807772877 2054-06-17
+NULL -7707546703881534780 2134-08-20
+NULL 214451696109242839 1855-05-12
+NULL 214451696109242839 1977-01-04
+NULL 214451696109242839 2179-04-18
+NULL 2438535236662373438 1881-09-16
+NULL 2438535236662373438 1916-01-10
+NULL 2438535236662373438 2026-06-23
+NULL 3845554233155411208 1805-11-10
+NULL 3845554233155411208 2264-04-05
+NULL 3873405809071478736 1918-11-20
+NULL 3873405809071478736 2034-06-09
+NULL 3873405809071478736 2164-04-23
+NULL 3905351789241845882 1866-07-28
+NULL 3905351789241845882 2045-12-05
+NULL 434940853096155515 2275-02-08
+NULL 4436884039838843341 2031-05-23
+NULL 5246983111579595707 1817-07-01
+NULL 5246983111579595707 2260-05-11
+NULL 5252407779338300447 2039-03-10
+NULL 5252407779338300447 2042-04-26
+NULL 6049335087268933751 2086-12-17
+NULL 6049335087268933751 2282-06-09
+NULL 7297177530102477725 1921-05-11
+NULL 7297177530102477725 1926-04-12
+NULL 7297177530102477725 2125-08-26
+NULL 7937120928560087303 2083-03-14
+NULL 8755921538765428593 1827-05-01
+NULL NULL NULL
+PREHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key
+order by b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_long_big_1a_nonull
+PREHOOK: Input: default@fullouter_long_small_1a_nonull
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT b.key, s.key, s.s_date FROM fullouter_long_big_1a_nonull b FULL OUTER JOIN fullouter_long_small_1a_nonull s ON b.key = s.key
+order by b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_long_big_1a_nonull
+POSTHOOK: Input: default@fullouter_long_small_1a_nonull
+#### A masked pattern was here ####
+-5206670856103795573 NULL NULL
+-5310365297525168078 NULL NULL
+-6187919478609154811 NULL NULL
+-6187919478609154811 NULL NULL
+-6187919478609154811 NULL NULL
+-6187919478609154811 NULL NULL
+-8460550397108077433 NULL NULL
+1569543799237464101 NULL NULL
+3313583664488247651 NULL NULL
+968819023021777205 NULL NULL
+NULL -1339636982994067311 2000-06-20
+NULL -1339636982994067311 2008-12-03
+NULL -2098090254092150988 1817-03-12
+NULL -2098090254092150988 2163-05-26
+NULL -2098090254092150988 2219-12-23
+NULL -2184423060953067642 1853-07-06
+NULL -2184423060953067642 1880-10-06
+NULL -2575185053386712613 1809-07-12
+NULL -2575185053386712613 2105-01-21
+NULL -2688622006344936758 1948-10-15
+NULL -2688622006344936758 2129-01-11
+NULL -327698348664467755 2222-10-15
+NULL -3655445881497026796 2108-08-16
+NULL -4224290881682877258 1813-05-17
+NULL -4224290881682877258 2120-01-16
+NULL -4224290881682877258 2185-07-08
+NULL -4961171400048338491 2196-08-10
+NULL -5706981533666803767 1800-09-20
+NULL -5706981533666803767 2151-06-09
+NULL -5754527700632192146 1958-07-15
+NULL -614848861623872247 2101-05-25
+NULL -614848861623872247 2112-11-09
+NULL -6784441713807772877 1845-02-16
+NULL -6784441713807772877 2054-06-17
+NULL -7707546703881534780 2134-08-20
+NULL 214451696109242839 1855-05-12
+NULL 214451696109242839 1977-01-04
+NULL 214451696109242839 2179-04-18
+NULL 2438535236662373438 1881-09-16
+NULL 2438535236662373438 1916-01-10
+NULL 2438535236662373438 2026-06-23
+NULL 3845554233155411208 1805-11-10
+NULL 3845554233155411208 2264-04-05
+NULL 3873405809071478736 1918-11-20
+NULL 3873405809071478736 2034-06-09
+NULL 3873405809071478736 2164-04-23
+NULL 3905351789241845882 1866-07-28
+NULL 3905351789241845882 2045-12-05
+NULL 434940853096155515 2275-02-08
+NULL 4436884039838843341 2031-05-23
+NULL 5246983111579595707 1817-07-01
+NULL 5246983111579595707 2260-05-11
+NULL 5252407779338300447 2039-03-10
+NULL 5252407779338300447 2042-04-26
+NULL 6049335087268933751 2086-12-17
+NULL 6049335087268933751 2282-06-09
+NULL 7297177530102477725 1921-05-11
+NULL 7297177530102477725 1926-04-12
+NULL 7297177530102477725 2125-08-26
+NULL 7937120928560087303 2083-03-14
+NULL 8755921538765428593 1827-05-01
+PREHOOK: query: CREATE TABLE fullouter_long_big_1b(key smallint)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_long_big_1b
+POSTHOOK: query: CREATE TABLE fullouter_long_big_1b(key smallint)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_long_big_1b
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1b.txt' OVERWRITE INTO TABLE fullouter_long_big_1b
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_long_big_1b
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1b.txt' OVERWRITE INTO TABLE fullouter_long_big_1b
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_long_big_1b
+PREHOOK: query: CREATE TABLE fullouter_long_small_1b(key smallint, s_timestamp timestamp)
+row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@fullouter_long_small_1b
+POSTHOOK: query: CREATE TABLE fullouter_long_small_1b(key smallint, s_timestamp timestamp)
+row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@fullouter_long_small_1b
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1b.txt' OVERWRITE INTO TABLE fullouter_long_small_1b
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@fullouter_long_small_1b
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1b.txt' OVERWRITE INTO TABLE fullouter_long_small_1b
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@fullouter_long_small_1b
+PREHOOK: query: analyze table fullouter_long_big_1b compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_long_big_1b
+PREHOOK: Output: default@fullouter_long_big_1b
+POSTHOOK: query: analyze table fullouter_long_big_1b compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@fullouter_long_big_1b
+POSTHOOK: Output: default@fullouter_long_big_1b
+PREHOOK: query: analyze table fullouter_long_big_1b compute statistics for columns
+PREHOOK: type: ANALYZE_TABLE
+PREHOOK: Input: default@fullouter_long_big_1b
+PREHOOK: Output: default@fullouter_long_big_1b
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table fullouter_long_big_1b compute statistics for columns
+POSTHOOK: type: ANALYZE_TABLE
+POSTHOOK: Input: default@fullouter_long_big_1b
+POSTHOOK: Output: default@fullouter_long_big_1b
+#### A masked pattern was here ####
+PREHOOK: query: analyze table fullouter_long_small_1b compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@fullouter_long_small_1b
+PREHOOK: Output: default@fullouter_long_small_1b
+POSTHOOK: query: analyze table fullouter_long_small_1b compute statistics
+POSTHOOK: type:
QUERY +POSTHOOK: Input: default@fullouter_long_small_1b +POSTHOOK: Output: default@fullouter_long_small_1b +PREHOOK: query: analyze table fullouter_long_small_1b compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_small_1b +PREHOOK: Output: default@fullouter_long_small_1b +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_small_1b compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_small_1b +POSTHOOK: Output: default@fullouter_long_small_1b +#### A masked pattern was here #### +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 13 Data size: 63 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:smallint, 1:ROW__ID:struct] + Select Operator + expressions: key (type: smallint) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 13 Data size: 63 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: smallint) + sort order: + + Map-reduce partition columns: _col0 (type: smallint) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:smallint + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 13 Data size: 63 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + includeColumns: [0] + dataColumns: key:smallint + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 2 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 72 Data size: 2208 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:smallint, 1:s_timestamp:timestamp, 2:ROW__ID:struct] + Select Operator + expressions: key (type: smallint), s_timestamp (type: timestamp) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + 
projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 72 Data size: 2208 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: smallint) + sort order: + + Map-reduce partition columns: _col0 (type: smallint) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:smallint + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:timestamp + Statistics: Num rows: 72 Data size: 2208 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: timestamp) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:smallint, s_timestamp:timestamp + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: a + reduceColumnSortOrder: + + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + dataColumns: KEY.reducesinkkey0:smallint, VALUE._col0:timestamp + partitionColumnCount: 0 + scratchColumnTypeNames: [bigint] + Reduce Operator Tree: + Map Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: smallint) + 1 KEY.reducesinkkey0 (type: smallint) + Map Join Vectorization: + bigTableKeyExpressions: col 0:smallint + bigTableValueExpressions: col 0:smallint, col 1:timestamp + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false + outputColumnNames: _col0, _col1, _col2 + input vertices: + 0 Map 1 + Statistics: Num rows: 79 Data size: 2428 Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: smallint) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + keyColumns: 0:smallint + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:smallint, 2:timestamp + Statistics: Num rows: 79 Data size: 2428 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: smallint), _col2 (type: timestamp) + Reducer 4 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine 
tez IN [tez, spark] IS true + reduceColumnNullOrder: z + reduceColumnSortOrder: + + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + dataColumns: KEY.reducesinkkey0:smallint, VALUE._col0:smallint, VALUE._col1:timestamp + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: smallint), VALUE._col0 (type: smallint), VALUE._col1 (type: timestamp) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2] + Statistics: Num rows: 79 Data size: 2428 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 79 Data size: 2428 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1b +PREHOOK: Input: default@fullouter_long_small_1b +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_timestamp FROM fullouter_long_big_1b b FULL OUTER JOIN fullouter_long_small_1b s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1b +POSTHOOK: Input: default@fullouter_long_small_1b +#### A masked pattern was here #### +-25394 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +31713 NULL NULL +32030 32030 2101-09-09 07:35:05.145 +NULL -14172 1918-09-13 11:44:24.496926711 +NULL -14172 2355-01-14 23:23:34 +NULL -14172 2809-06-07 02:10:58 +NULL -15361 2219-09-15 20:15:03.000169887 +NULL -15361 2434-08-13 20:37:07.000172979 +NULL -15427 2023-11-09 19:31:21 +NULL -15427 2046-06-07 22:58:40.728 +NULL -15427 2355-01-08 12:34:11.617 +NULL -19167 2230-12-22 20:25:39.000242111 +NULL -19167 2319-08-26 11:07:11.268 +NULL -20517 2233-12-20 04:06:56.666522799 +NULL -20517 2774-06-23 12:04:06.5 +NULL -20824 2478-11-05 00:28:05 +NULL -22422 1949-03-13 00:07:53.075 +NULL -22422 2337-07-19 06:33:02.000353352 +NULL -22422 2982-12-28 06:30:26.000883228 +NULL -23117 2037-01-05 21:52:30.685952759 +NULL -24775 2035-03-26 08:11:23.375224153 +NULL -24775 2920-08-06 15:58:28.261059449 +NULL -26998 2268-08-04 12:48:11.848006292 +NULL -26998 2428-12-26 07:53:45.96925825 +NULL -26998 2926-07-18 09:02:46.077 +NULL -29600 2333-11-02 15:06:30 +NULL -30059 2269-05-04 21:23:44.000339209 +NULL -30059 2420-12-10 22:12:30 +NULL -30059 2713-10-13 09:28:49 +NULL -30306 2619-05-24 10:35:58.000774018 +NULL -4279 2214-09-10 03:53:06 +NULL -4279 2470-08-12 11:21:14.000955747 +NULL -7373 2662-10-28 12:07:02.000526564 +NULL -7624 2219-12-03 17:07:19 +NULL -7624 2289-08-28 00:14:34 +NULL -7624 2623-03-20 03:18:45.00006465 +NULL -8087 2550-06-26 23:57:42.588007617 +NULL -8087 2923-07-02 11:40:26.115 +NULL -8435 2642-02-07 11:45:04.353231638 +NULL -8435 2834-12-06 16:38:18.901 +NULL -8624 2120-02-15 15:36:40.000758423 +NULL -8624 
2282-03-28 07:58:16 +NULL -8624 2644-05-04 04:45:07.839 +NULL 10553 2168-05-05 21:10:59.000152113 +NULL 11232 2038-04-06 14:53:59 +NULL 11232 2507-01-27 22:04:22.49661421 +NULL 11232 2533-11-26 12:22:18 +NULL 13598 2421-05-20 14:18:31.000264698 +NULL 13598 2909-06-25 23:22:50 +NULL 14865 2079-10-06 16:54:35.117 +NULL 14865 2220-02-28 03:41:36 +NULL 14865 2943-03-21 00:42:10.505 +NULL 17125 2236-07-14 01:54:40.927230276 +NULL 17125 2629-11-15 15:34:52 +NULL 21181 2253-03-12 11:55:48.332 +NULL 21181 2434-02-20 00:46:29.633 +NULL 21436 2526-09-22 23:44:55 +NULL 21436 2696-05-08 05:19:24.112 +NULL 24870 2752-12-26 12:32:23.03685163 +NULL 2632 2561-12-15 15:42:27 +NULL 26484 1919-03-04 07:32:37.519 +NULL 26484 2953-03-10 02:05:26.508953676 +NULL 2748 2298-06-20 21:01:24 +NULL 2748 2759-02-13 18:04:36.000307355 +NULL 2748 2862-04-20 13:12:39.482805897 +NULL 29407 2385-12-14 06:03:39.597 +NULL 3198 2223-04-14 13:20:49 +NULL 3198 2428-06-13 16:21:33.955 +NULL 3198 2736-12-20 03:59:50.343550301 +NULL 4510 2293-01-17 13:47:41.00001006 +NULL 4510 2777-03-24 03:44:28.000169723 +NULL NULL 2124-05-07 15:01:19.021 +NULL NULL 2933-06-20 11:48:09.000839488 +NULL NULL 2971-08-07 12:02:11.000948152 +NULL NULL NULL +PREHOOK: query: CREATE TABLE fullouter_long_big_1c(key int, b_string string) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_big_1c +POSTHOOK: query: CREATE TABLE fullouter_long_big_1c(key int, b_string string) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1c +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1c.txt' OVERWRITE INTO TABLE fullouter_long_big_1c +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_big_1c +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1c.txt' OVERWRITE INTO TABLE fullouter_long_big_1c +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_big_1c +PREHOOK: query: CREATE TABLE fullouter_long_small_1c(key int, s_decimal decimal(38, 18)) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1c +POSTHOOK: query: CREATE TABLE fullouter_long_small_1c(key int, s_decimal decimal(38, 18)) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1c +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1c.txt' OVERWRITE INTO TABLE fullouter_long_small_1c +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_small_1c +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1c.txt' OVERWRITE INTO TABLE fullouter_long_small_1c +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_small_1c +PREHOOK: query: analyze table fullouter_long_big_1c compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1c +PREHOOK: Output: default@fullouter_long_big_1c +POSTHOOK: query: analyze table fullouter_long_big_1c compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1c +POSTHOOK: Output: default@fullouter_long_big_1c +PREHOOK: query: 
analyze table fullouter_long_big_1c compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_big_1c +PREHOOK: Output: default@fullouter_long_big_1c +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_big_1c compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_big_1c +POSTHOOK: Output: default@fullouter_long_big_1c +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_long_small_1c compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_small_1c +PREHOOK: Output: default@fullouter_long_small_1c +POSTHOOK: query: analyze table fullouter_long_small_1c compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_small_1c +POSTHOOK: Output: default@fullouter_long_small_1c +PREHOOK: query: analyze table fullouter_long_small_1c compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_small_1c +PREHOOK: Output: default@fullouter_long_small_1c +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_small_1c compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_small_1c +POSTHOOK: Output: default@fullouter_long_small_1c +#### A masked pattern was here #### +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 11 Data size: 173 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:int, 1:b_string:string, 2:ROW__ID:struct] + Select Operator + expressions: key (type: int), b_string (type: string) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 11 Data size: 173 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:string + Statistics: Num rows: 11 Data size: 173 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + 
enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:int, b_string:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 2 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 81 Data size: 1703 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:int, 1:s_decimal:decimal(38,18), 2:ROW__ID:struct] + Select Operator + expressions: key (type: int), s_decimal (type: decimal(38,18)) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 81 Data size: 1703 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:decimal(38,18) + Statistics: Num rows: 81 Data size: 1703 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: decimal(38,18)) + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:int, s_decimal:decimal(38,18) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: a + reduceColumnSortOrder: + + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + dataColumns: KEY.reducesinkkey0:int, VALUE._col0:decimal(38,18) + partitionColumnCount: 0 + scratchColumnTypeNames: [bigint, string] + Reduce Operator Tree: + Map Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: int) + 1 KEY.reducesinkkey0 (type: int) + Map Join Vectorization: + bigTableKeyExpressions: col 0:int + bigTableValueExpressions: col 0:int, col 1:decimal(38,18) + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false + outputColumnNames: _col0, _col1, _col2, _col3 + input vertices: + 0 Map 1 + Statistics: Num rows: 89 Data size: 1873 Basic stats: 
COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + keyColumns: 0:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:string, 2:int, 3:decimal(38,18) + Statistics: Num rows: 89 Data size: 1873 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string), _col2 (type: int), _col3 (type: decimal(38,18)) + Reducer 4 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: z + reduceColumnSortOrder: + + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 4 + dataColumns: KEY.reducesinkkey0:int, VALUE._col0:string, VALUE._col1:int, VALUE._col2:decimal(38,18) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string), VALUE._col1 (type: int), VALUE._col2 (type: decimal(38,18)) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3] + Statistics: Num rows: 89 Data size: 1873 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 89 Data size: 1873 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1c +PREHOOK: Input: default@fullouter_long_small_1c +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, b.b_string, s.key, s.s_decimal FROM fullouter_long_big_1c b FULL OUTER JOIN fullouter_long_small_1c s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1c +POSTHOOK: Input: default@fullouter_long_small_1c +#### A masked pattern was here #### +-1437463633 JU NULL NULL +-1437463633 NULL NULL NULL +-1437463633 SOWDWMS NULL NULL +-1437463633 TKTKGVGFW NULL NULL +-1437463633 YYXPPCH NULL NULL +1725068083 MKSCCE NULL NULL +1928928239 AMKTIWQ NULL NULL +1928928239 NULL NULL NULL +1928928239 NULL NULL NULL +1928928239 VAQHVRI NULL NULL +NULL ABBZ NULL NULL +NULL NULL -1093006502 -69.556658280000000000 +NULL NULL -1197550983 -0.558879692200000000 +NULL NULL -1197550983 0.100000000000000000 +NULL NULL -1197550983 71852.833867441261300000 +NULL NULL -1250662632 -544.554649000000000000 +NULL NULL -1250662632 5454127198.951479000000000000 +NULL NULL -1250662632 93104.000000000000000000 +NULL NULL -1264372462 -6993985240226.000000000000000000 +NULL NULL -1264372462 
-899.000000000000000000 +NULL NULL -1264372462 0.883000000000000000 +NULL NULL -1490239076 92253.232096000000000000 +NULL NULL -1681455031 -11105.372477000000000000 +NULL NULL -1681455031 -6.454300000000000000 +NULL NULL -1740848088 -9.157000000000000000 +NULL NULL -1740848088 0.506394259000000000 +NULL NULL -1740848088 901.441000000000000000 +NULL NULL -2048404259 -0.322296044625100000 +NULL NULL -2048404259 3939387044.100000000000000000 +NULL NULL -2123273881 -55.891980000000000000 +NULL NULL -2123273881 3.959000000000000000 +NULL NULL -243940373 -583.258000000000000000 +NULL NULL -243940373 -97176129669.654953000000000000 +NULL NULL -369457052 560.119078830904550000 +NULL NULL -369457052 7.700000000000000000 +NULL NULL -424713789 0.480000000000000000 +NULL NULL -466171792 0.000000000000000000 +NULL NULL -466171792 4227.534400000000000000 +NULL NULL -466171792 69.900000000000000000 +NULL NULL -477147437 6.000000000000000000 +NULL NULL -793950320 -0.100000000000000000 +NULL NULL -793950320 -16.000000000000000000 +NULL NULL -934092157 -7843850349.571300380000000000 +NULL NULL -99948814 -38076694.398100000000000000 +NULL NULL -99948814 -96386.438000000000000000 +NULL NULL 1039864870 0.700000000000000000 +NULL NULL 1039864870 94.040000000000000000 +NULL NULL 1039864870 987601.570000000000000000 +NULL NULL 1091836730 -5017.140000000000000000 +NULL NULL 1091836730 0.020000000000000000 +NULL NULL 1242586043 -4.000000000000000000 +NULL NULL 1242586043 -749975924224.630000000000000000 +NULL NULL 1242586043 71.148500000000000000 +NULL NULL 1479580778 92077343080.700000000000000000 +NULL NULL 150678276 -8278.000000000000000000 +NULL NULL 150678276 15989394.843600000000000000 +NULL NULL 1519948464 152.000000000000000000 +NULL NULL 1561921421 -5.405000000000000000 +NULL NULL 1561921421 53050.550000000000000000 +NULL NULL 1585021913 -5762331.066971120000000000 +NULL NULL 1585021913 607.227470000000000000 +NULL NULL 1585021913 745222.668089540000000000 +NULL NULL 1719049112 -7888197.000000000000000000 +NULL NULL 1738753776 -99817635066320.241600000000000000 +NULL NULL 1738753776 1525.280459649262000000 +NULL NULL 1755897735 -39.965207000000000000 +NULL NULL 1785750809 47443.115000000000000000 +NULL NULL 1801735854 -1760956929364.267000000000000000 +NULL NULL 1801735854 -438541294.700000000000000000 +NULL NULL 1816559437 -1035.700900000000000000 +NULL NULL 1909136587 -8610.078036935181000000 +NULL NULL 1909136587 181.076815359440000000 +NULL NULL 193709887 -0.566300000000000000 +NULL NULL 193709887 -19889.830000000000000000 +NULL NULL 193709887 0.800000000000000000 +NULL NULL 284554389 5.727146000000000000 +NULL NULL 294598722 -3542.600000000000000000 +NULL NULL 294598722 -9377326244.444000000000000000 +NULL NULL 448130683 -4302.485366846491000000 +NULL NULL 452719211 3020.293893074463600000 +NULL NULL 452719211 83003.437220000000000000 +NULL NULL 466567142 -58810.605860000000000000 +NULL NULL 466567142 -9763217822.129028000000000000 +NULL NULL 466567142 196.578529539858400000 +NULL NULL 560745412 678.250000000000000000 +NULL NULL 698032489 -330457.429262583900000000 +NULL NULL 891262439 -0.040000000000000000 +NULL NULL 90660785 -4564.517185000000000000 +NULL NULL 90660785 12590.288613000000000000 +NULL NULL NULL 1.089120893565337000 +NULL NULL NULL 4.261652270000000000 +NULL NULL NULL 682070836.264960300000000000 +PREHOOK: query: CREATE TABLE fullouter_long_big_1d(key int) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: 
default@fullouter_long_big_1d +POSTHOOK: query: CREATE TABLE fullouter_long_big_1d(key int) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_big_1d +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1d.txt' OVERWRITE INTO TABLE fullouter_long_big_1d +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_big_1d +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_big_1d.txt' OVERWRITE INTO TABLE fullouter_long_big_1d +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_big_1d +PREHOOK: query: CREATE TABLE fullouter_long_small_1d(key int) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_long_small_1d +POSTHOOK: query: CREATE TABLE fullouter_long_small_1d(key int) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_long_small_1d +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1d.txt' OVERWRITE INTO TABLE fullouter_long_small_1d +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_long_small_1d +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_long_small_1d.txt' OVERWRITE INTO TABLE fullouter_long_small_1d +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_long_small_1d +PREHOOK: query: analyze table fullouter_long_big_1d compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1d +PREHOOK: Output: default@fullouter_long_big_1d +POSTHOOK: query: analyze table fullouter_long_big_1d compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1d +POSTHOOK: Output: default@fullouter_long_big_1d +PREHOOK: query: analyze table fullouter_long_big_1d compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_big_1d +PREHOOK: Output: default@fullouter_long_big_1d +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_big_1d compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_big_1d +POSTHOOK: Output: default@fullouter_long_big_1d +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_long_small_1d compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_small_1d +PREHOOK: Output: default@fullouter_long_small_1d +POSTHOOK: query: analyze table fullouter_long_small_1d compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_small_1d +POSTHOOK: Output: default@fullouter_long_small_1d +PREHOOK: query: analyze table fullouter_long_small_1d compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_long_small_1d +PREHOOK: Output: default@fullouter_long_small_1d +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_long_small_1d compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_long_small_1d +POSTHOOK: Output: default@fullouter_long_small_1d +#### A masked pattern was here #### +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN 
fullouter_long_small_1d s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 12 Data size: 106 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:int, 1:ROW__ID:struct] + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 12 Data size: 106 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 12 Data size: 106 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + includeColumns: [0] + dataColumns: key:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 2 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 39 Data size: 381 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:int, 1:ROW__ID:struct] + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 39 Data size: 381 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 39 Data size: 381 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + 
inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + includeColumns: [0] + dataColumns: key:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: a + reduceColumnSortOrder: + + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + dataColumns: KEY.reducesinkkey0:int + partitionColumnCount: 0 + scratchColumnTypeNames: [bigint] + Reduce Operator Tree: + Map Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: int) + 1 KEY.reducesinkkey0 (type: int) + Map Join Vectorization: + bigTableKeyExpressions: col 0:int + bigTableValueExpressions: col 0:int + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false + outputColumnNames: _col0, _col1 + input vertices: + 0 Map 1 + Statistics: Num rows: 42 Data size: 419 Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + keyColumns: 0:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:int + Statistics: Num rows: 42 Data size: 419 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: int) + Reducer 4 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: z + reduceColumnSortOrder: + + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + dataColumns: KEY.reducesinkkey0:int, VALUE._col0:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 42 Data size: 419 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 42 Data size: 419 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor 
Tree: + ListSink + +PREHOOK: query: SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_long_big_1d +PREHOOK: Input: default@fullouter_long_small_1d +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key FROM fullouter_long_big_1d b FULL OUTER JOIN fullouter_long_small_1d s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_long_big_1d +POSTHOOK: Input: default@fullouter_long_small_1d +#### A masked pattern was here #### +-1780951928 NULL +-2038654700 -2038654700 +-670834064 NULL +-702028721 NULL +-702028721 NULL +-702028721 NULL +-814597051 NULL +-814597051 NULL +-814597051 NULL +-814597051 NULL +NULL -1003639073 +NULL -1014271154 +NULL -1036083124 +NULL -1210744742 +NULL -1323620496 +NULL -1379355738 +NULL -1712018127 +NULL -1792852276 +NULL -1912571616 +NULL -497171161 +NULL -683339273 +NULL -707688773 +NULL -747044796 +NULL -894799664 +NULL -932176731 +NULL 103640700 +NULL 1164387380 +NULL 1372592319 +NULL 1431997749 +NULL 1614287784 +NULL 162858059 +NULL 1635405412 +NULL 1685473722 +NULL 1780951928 +NULL 1825107160 +NULL 1831520491 +NULL 1840266070 +NULL 1997943409 +NULL 2119085509 +NULL 246169862 +NULL 260588085 +NULL 41376947 +NULL 436878811 +NULL 533298451 +NULL 670834064 +NULL 699007128 +NULL 699863556 +NULL NULL +NULL NULL +NULL NULL +PREHOOK: query: CREATE TABLE fullouter_multikey_big_1a_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_big_1a_txt +POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1a_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_big_1a_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_multikey_big_1a_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_multikey_big_1a_txt +PREHOOK: query: CREATE TABLE fullouter_multikey_big_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_multikey_big_1a_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_big_1a +POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_multikey_big_1a_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_big_1a +POSTHOOK: Lineage: fullouter_multikey_big_1a.key0 SIMPLE [(fullouter_multikey_big_1a_txt)fullouter_multikey_big_1a_txt.FieldSchema(name:key0, type:smallint, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_big_1a.key1 SIMPLE [(fullouter_multikey_big_1a_txt)fullouter_multikey_big_1a_txt.FieldSchema(name:key1, type:int, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_multikey_big_1a_nonull_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' 
+PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_big_1a_nonull_txt +POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1a_nonull_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_big_1a_nonull_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_nonull_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_multikey_big_1a_nonull_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1a_nonull_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_multikey_big_1a_nonull_txt +PREHOOK: query: CREATE TABLE fullouter_multikey_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_nonull_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_multikey_big_1a_nonull_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_big_1a_nonull +POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1a_nonull_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_multikey_big_1a_nonull_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_big_1a_nonull +POSTHOOK: Lineage: fullouter_multikey_big_1a_nonull.key0 SIMPLE [(fullouter_multikey_big_1a_nonull_txt)fullouter_multikey_big_1a_nonull_txt.FieldSchema(name:key0, type:smallint, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_big_1a_nonull.key1 SIMPLE [(fullouter_multikey_big_1a_nonull_txt)fullouter_multikey_big_1a_nonull_txt.FieldSchema(name:key1, type:int, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_multikey_small_1a_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_small_1a_txt +POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1a_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_small_1a_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_multikey_small_1a_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_multikey_small_1a_txt +PREHOOK: query: CREATE TABLE fullouter_multikey_small_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_multikey_small_1a_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_small_1a +POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1a STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_multikey_small_1a_txt 
+POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_small_1a +POSTHOOK: Lineage: fullouter_multikey_small_1a.key0 SIMPLE [(fullouter_multikey_small_1a_txt)fullouter_multikey_small_1a_txt.FieldSchema(name:key0, type:smallint, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_small_1a.key1 SIMPLE [(fullouter_multikey_small_1a_txt)fullouter_multikey_small_1a_txt.FieldSchema(name:key1, type:int, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_multikey_small_1a_nonull_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_small_1a_nonull_txt +POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1a_nonull_txt(key0 smallint, key1 int) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_small_1a_nonull_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_nonull_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_multikey_small_1a_nonull_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1a_nonull_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_multikey_small_1a_nonull_txt +PREHOOK: query: CREATE TABLE fullouter_multikey_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_nonull_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_multikey_small_1a_nonull_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_small_1a_nonull +POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1a_nonull_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_multikey_small_1a_nonull_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_small_1a_nonull +POSTHOOK: Lineage: fullouter_multikey_small_1a_nonull.key0 SIMPLE [(fullouter_multikey_small_1a_nonull_txt)fullouter_multikey_small_1a_nonull_txt.FieldSchema(name:key0, type:smallint, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_small_1a_nonull.key1 SIMPLE [(fullouter_multikey_small_1a_nonull_txt)fullouter_multikey_small_1a_nonull_txt.FieldSchema(name:key1, type:int, comment:null), ] +PREHOOK: query: analyze table fullouter_multikey_big_1a compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_big_1a +PREHOOK: Output: default@fullouter_multikey_big_1a +POSTHOOK: query: analyze table fullouter_multikey_big_1a compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_big_1a +POSTHOOK: Output: default@fullouter_multikey_big_1a +PREHOOK: query: analyze table fullouter_multikey_big_1a compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_multikey_big_1a +PREHOOK: Output: default@fullouter_multikey_big_1a +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_multikey_big_1a compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_multikey_big_1a +POSTHOOK: Output: default@fullouter_multikey_big_1a +#### A masked pattern was here #### 
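
-- Editor's note: the golden output above and below follows one repeated pattern per
-- test case: stage text data, convert to ORC, analyze, then EXPLAIN and run a FULL
-- OUTER JOIN. Below is a minimal hedged sketch of the .q script that would produce
-- the multi-key records above. The DDL/DML text is taken directly from the query
-- echoes in the output; the SET lines are assumptions inferred from the plans'
-- enabledConditionsMet / nativeConditionsNotMet entries, not statements visible here.

set hive.vectorized.execution.enabled=true;
-- The plans report "hive.vectorized.execution.mapjoin.native.enabled IS false" under
-- nativeConditionsNotMet, so this run presumably had the native MapJoin path disabled:
set hive.vectorized.execution.mapjoin.native.enabled=false;

-- Stage the text data, then copy into ORC so the vectorized input format condition
-- ("hive.vectorized.use.vectorized.input.format IS true", LLAP IO: all inputs) is met.
CREATE TABLE fullouter_multikey_big_1a_txt(key0 smallint, key1 int)
row format delimited fields terminated by ',';
LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1a.txt'
OVERWRITE INTO TABLE fullouter_multikey_big_1a_txt;
CREATE TABLE fullouter_multikey_big_1a STORED AS ORC
AS SELECT * FROM fullouter_multikey_big_1a_txt;

-- Basic and column statistics drive the big-table/small-table choice when the
-- optimizer converts the shuffle join into a (dynamically partitioned) hash MapJoin.
analyze table fullouter_multikey_big_1a compute statistics;
analyze table fullouter_multikey_big_1a compute statistics for columns;

-- Multi-column join key: both key0 and key1 appear in the ON clause, which is why the
-- plan above picks VectorReduceSinkMultiKeyOperator instead of the Long variant used
-- by the single-key 1a-1d cases earlier in this file.
EXPLAIN VECTORIZATION DETAIL
SELECT b.key0, b.key1, s.key0, s.key1
FROM fullouter_multikey_big_1a b
FULL OUTER JOIN fullouter_multikey_small_1a s
ON b.key0 = s.key0 AND b.key1 = s.key1
order by b.key0, b.key1;

-- Each case also carries _nonull table variants; since NULL keys never match in a
-- join, these presumably isolate the unmatched-row (outer) generation path from the
-- NULL-key handling path in the FULL OUTER MapJoin result checks.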
+PREHOOK: query: analyze table fullouter_multikey_big_1a_nonull compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_big_1a_nonull +PREHOOK: Output: default@fullouter_multikey_big_1a_nonull +POSTHOOK: query: analyze table fullouter_multikey_big_1a_nonull compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_big_1a_nonull +POSTHOOK: Output: default@fullouter_multikey_big_1a_nonull +PREHOOK: query: analyze table fullouter_multikey_big_1a_nonull compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_multikey_big_1a_nonull +PREHOOK: Output: default@fullouter_multikey_big_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_multikey_big_1a_nonull compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_multikey_big_1a_nonull +POSTHOOK: Output: default@fullouter_multikey_big_1a_nonull +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_multikey_small_1a compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_small_1a +PREHOOK: Output: default@fullouter_multikey_small_1a +POSTHOOK: query: analyze table fullouter_multikey_small_1a compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_small_1a +POSTHOOK: Output: default@fullouter_multikey_small_1a +PREHOOK: query: analyze table fullouter_multikey_small_1a compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_multikey_small_1a +PREHOOK: Output: default@fullouter_multikey_small_1a +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_multikey_small_1a compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_multikey_small_1a +POSTHOOK: Output: default@fullouter_multikey_small_1a +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_multikey_small_1a_nonull compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_small_1a_nonull +PREHOOK: Output: default@fullouter_multikey_small_1a_nonull +POSTHOOK: query: analyze table fullouter_multikey_small_1a_nonull compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_small_1a_nonull +POSTHOOK: Output: default@fullouter_multikey_small_1a_nonull +PREHOOK: query: analyze table fullouter_multikey_small_1a_nonull compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_multikey_small_1a_nonull +PREHOOK: Output: default@fullouter_multikey_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_multikey_small_1a_nonull compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_multikey_small_1a_nonull +POSTHOOK: Output: default@fullouter_multikey_small_1a_nonull +#### A masked pattern was here #### +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + 
enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 13 Data size: 88 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key0:smallint, 1:key1:int, 2:ROW__ID:struct] + Select Operator + expressions: key0 (type: smallint), key1 (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 13 Data size: 88 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: smallint), _col1 (type: int) + sort order: ++ + Map-reduce partition columns: _col0 (type: smallint), _col1 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + keyColumns: 0:smallint, 1:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 13 Data size: 88 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key0:smallint, key1:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 2 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 92 Data size: 724 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key0:smallint, 1:key1:int, 2:ROW__ID:struct] + Select Operator + expressions: key0 (type: smallint), key1 (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 92 Data size: 724 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: smallint), _col1 (type: int) + sort order: ++ + Map-reduce partition columns: _col0 (type: smallint), _col1 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + keyColumns: 0:smallint, 1:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 92 Data size: 724 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: 
[DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key0:smallint, key1:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: aa + reduceColumnSortOrder: ++ + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + dataColumns: KEY.reducesinkkey0:smallint, KEY.reducesinkkey1:int + partitionColumnCount: 0 + scratchColumnTypeNames: [bigint, bigint] + Reduce Operator Tree: + Map Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: smallint), KEY.reducesinkkey1 (type: int) + 1 KEY.reducesinkkey0 (type: smallint), KEY.reducesinkkey1 (type: int) + Map Join Vectorization: + bigTableKeyExpressions: col 0:smallint, col 1:int + bigTableValueExpressions: col 0:smallint, col 1:int + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false + outputColumnNames: _col0, _col1, _col2, _col3 + input vertices: + 0 Map 1 + Statistics: Num rows: 101 Data size: 796 Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: smallint), _col1 (type: int) + sort order: ++ + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + keyColumns: 0:smallint, 1:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 2:smallint, 3:int + Statistics: Num rows: 101 Data size: 796 Basic stats: COMPLETE Column stats: NONE + value expressions: _col2 (type: smallint), _col3 (type: int) + Reducer 4 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: zz + reduceColumnSortOrder: ++ + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 4 + dataColumns: KEY.reducesinkkey0:smallint, KEY.reducesinkkey1:int, VALUE._col0:smallint, VALUE._col1:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: smallint), KEY.reducesinkkey1 (type: int), VALUE._col0 (type: smallint), VALUE._col1 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3] + Statistics: Num rows: 101 Data size: 796 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: 
Num rows: 101 Data size: 796 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_big_1a +PREHOOK: Input: default@fullouter_multikey_small_1a +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_big_1a +POSTHOOK: Input: default@fullouter_multikey_small_1a +#### A masked pattern was here #### +-17582 -1730236061 NULL NULL +-17582 1082230084 NULL NULL +-17582 267529350 -17582 267529350 +-17582 827141667 NULL NULL +-17582 9637312 NULL NULL +-18222 -1969080993 NULL NULL +-6131 -1969080993 -6131 -1969080993 +1499 371855128 NULL NULL +22767 -1969080993 NULL NULL +3556 -1969080993 NULL NULL +3556 NULL NULL NULL +NULL 1082230084 NULL NULL +NULL NULL -11868 -3536499 +NULL NULL -11868 -915441041 +NULL NULL -11868 1052120431 +NULL NULL -11868 1318114822 +NULL NULL -11868 1456809245 +NULL NULL -11868 1658440922 +NULL NULL -11868 930596435 +NULL NULL -11868 97203778 +NULL NULL -12252 1956403781 +NULL NULL -12252 964377504 +NULL NULL -15212 -2055239583 +NULL NULL -17788 -1361776766 +NULL NULL -17788 -738743861 +NULL NULL -17788 -872691214 +NULL NULL -17788 528419995 +NULL NULL -1787 -63842445 +NULL NULL -20125 -1995259010 +NULL NULL -20900 1078466156 +NULL NULL -22311 -2055239583 +NULL NULL -23457 -63842445 +NULL NULL -2407 1078466156 +NULL NULL -24206 -1456409156 +NULL NULL -24206 641361618 +NULL NULL -26894 -63842445 +NULL NULL -28129 -2055239583 +NULL NULL -28137 -63842445 +NULL NULL -28313 -706104224 +NULL NULL -28313 51228026 +NULL NULL -28313 837320573 +NULL NULL -4117 -1386947816 +NULL NULL -5734 1078466156 +NULL NULL -6061 -586336015 +NULL NULL -7386 -1635102480 +NULL NULL -7386 -2112062470 +NULL NULL -7386 100736776 +NULL NULL -980 -270600267 +NULL NULL -980 -333603940 +NULL NULL -980 -465544127 +NULL NULL -980 -801821285 +NULL NULL -980 1310479628 +NULL NULL -980 2009785365 +NULL NULL -980 356970043 +NULL NULL -980 628784462 +NULL NULL -980 712692345 +NULL NULL 11460 1078466156 +NULL NULL 12089 -63842445 +NULL NULL 13672 -63842445 +NULL NULL 14400 -825652334 +NULL NULL 15061 -63842445 +NULL NULL 15404 1078466156 +NULL NULL 16166 931172175 +NULL NULL 16696 -63842445 +NULL NULL 20156 -1618478138 +NULL NULL 20156 1165375499 +NULL NULL 20156 1855042153 +NULL NULL 20156 963883665 +NULL NULL 20969 -1995259010 +NULL NULL 21186 -586336015 +NULL NULL 22934 -1695419330 +NULL NULL 23015 -1893013623 +NULL NULL 23015 -217613200 +NULL NULL 23015 -252525791 +NULL NULL 23015 -276888585 +NULL NULL 23015 -696928205 +NULL NULL 23015 -893234501 +NULL NULL 23015 258882280 +NULL NULL 23015 564751472 +NULL NULL 26738 -2055239583 +NULL NULL 26944 -1995259010 +NULL NULL 30353 -1007182618 +NULL NULL 30353 -1011627089 +NULL NULL 30353 -1507157031 +NULL NULL 30353 105613996 +NULL NULL 30353 1241923267 +NULL NULL 30353 1364268303 +NULL NULL 30353 
2044473567 +NULL NULL 31443 -1968665833 +NULL NULL 3412 -1196037018 +NULL NULL 3412 -1249487623 +NULL NULL 3412 -2081156563 +NULL NULL 3412 -2132472060 +NULL NULL 3412 1253976194 +NULL NULL 3890 1411429004 +NULL NULL 4586 -586336015 +NULL NULL 4779 -1995259010 +NULL NULL 4902 1078466156 +NULL NULL 5957 -1995259010 +NULL NULL 8177 -1995259010 +NULL NULL NULL 1082230084 +NULL NULL NULL NULL +NULL NULL NULL NULL +PREHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_big_1a_nonull +PREHOOK: Input: default@fullouter_multikey_small_1a +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_big_1a_nonull +POSTHOOK: Input: default@fullouter_multikey_small_1a +#### A masked pattern was here #### +-17582 -1730236061 NULL NULL +-17582 1082230084 NULL NULL +-17582 267529350 -17582 267529350 +-17582 827141667 NULL NULL +-17582 9637312 NULL NULL +-18222 -1969080993 NULL NULL +-6131 -1969080993 -6131 -1969080993 +1499 371855128 NULL NULL +22767 -1969080993 NULL NULL +3556 -1969080993 NULL NULL +NULL NULL -11868 -3536499 +NULL NULL -11868 -915441041 +NULL NULL -11868 1052120431 +NULL NULL -11868 1318114822 +NULL NULL -11868 1456809245 +NULL NULL -11868 1658440922 +NULL NULL -11868 930596435 +NULL NULL -11868 97203778 +NULL NULL -12252 1956403781 +NULL NULL -12252 964377504 +NULL NULL -15212 -2055239583 +NULL NULL -17788 -1361776766 +NULL NULL -17788 -738743861 +NULL NULL -17788 -872691214 +NULL NULL -17788 528419995 +NULL NULL -1787 -63842445 +NULL NULL -20125 -1995259010 +NULL NULL -20900 1078466156 +NULL NULL -22311 -2055239583 +NULL NULL -23457 -63842445 +NULL NULL -2407 1078466156 +NULL NULL -24206 -1456409156 +NULL NULL -24206 641361618 +NULL NULL -26894 -63842445 +NULL NULL -28129 -2055239583 +NULL NULL -28137 -63842445 +NULL NULL -28313 -706104224 +NULL NULL -28313 51228026 +NULL NULL -28313 837320573 +NULL NULL -4117 -1386947816 +NULL NULL -5734 1078466156 +NULL NULL -6061 -586336015 +NULL NULL -7386 -1635102480 +NULL NULL -7386 -2112062470 +NULL NULL -7386 100736776 +NULL NULL -980 -270600267 +NULL NULL -980 -333603940 +NULL NULL -980 -465544127 +NULL NULL -980 -801821285 +NULL NULL -980 1310479628 +NULL NULL -980 2009785365 +NULL NULL -980 356970043 +NULL NULL -980 628784462 +NULL NULL -980 712692345 +NULL NULL 11460 1078466156 +NULL NULL 12089 -63842445 +NULL NULL 13672 -63842445 +NULL NULL 14400 -825652334 +NULL NULL 15061 -63842445 +NULL NULL 15404 1078466156 +NULL NULL 16166 931172175 +NULL NULL 16696 -63842445 +NULL NULL 20156 -1618478138 +NULL NULL 20156 1165375499 +NULL NULL 20156 1855042153 +NULL NULL 20156 963883665 +NULL NULL 20969 -1995259010 +NULL NULL 21186 -586336015 +NULL NULL 22934 -1695419330 +NULL NULL 23015 -1893013623 +NULL NULL 23015 -217613200 +NULL NULL 23015 -252525791 +NULL NULL 23015 -276888585 +NULL NULL 23015 -696928205 +NULL NULL 23015 -893234501 +NULL NULL 23015 258882280 +NULL NULL 23015 564751472 +NULL NULL 26738 -2055239583 +NULL NULL 26944 -1995259010 +NULL NULL 30353 -1007182618 +NULL NULL 30353 -1011627089 +NULL NULL 30353 -1507157031 +NULL NULL 30353 105613996 +NULL NULL 30353 1241923267 +NULL 
NULL 30353 1364268303 +NULL NULL 30353 2044473567 +NULL NULL 31443 -1968665833 +NULL NULL 3412 -1196037018 +NULL NULL 3412 -1249487623 +NULL NULL 3412 -2081156563 +NULL NULL 3412 -2132472060 +NULL NULL 3412 1253976194 +NULL NULL 3890 1411429004 +NULL NULL 4586 -586336015 +NULL NULL 4779 -1995259010 +NULL NULL 4902 1078466156 +NULL NULL 5957 -1995259010 +NULL NULL 8177 -1995259010 +NULL NULL NULL 1082230084 +NULL NULL NULL NULL +PREHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_big_1a +PREHOOK: Input: default@fullouter_multikey_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_big_1a +POSTHOOK: Input: default@fullouter_multikey_small_1a_nonull +#### A masked pattern was here #### +-17582 -1730236061 NULL NULL +-17582 1082230084 NULL NULL +-17582 267529350 -17582 267529350 +-17582 827141667 NULL NULL +-17582 9637312 NULL NULL +-18222 -1969080993 NULL NULL +-6131 -1969080993 -6131 -1969080993 +1499 371855128 NULL NULL +22767 -1969080993 NULL NULL +3556 -1969080993 NULL NULL +3556 NULL NULL NULL +NULL 1082230084 NULL NULL +NULL NULL -11868 -3536499 +NULL NULL -11868 -915441041 +NULL NULL -11868 1052120431 +NULL NULL -11868 1318114822 +NULL NULL -11868 1456809245 +NULL NULL -11868 1658440922 +NULL NULL -11868 930596435 +NULL NULL -11868 97203778 +NULL NULL -12252 1956403781 +NULL NULL -12252 964377504 +NULL NULL -15212 -2055239583 +NULL NULL -17788 -1361776766 +NULL NULL -17788 -738743861 +NULL NULL -17788 -872691214 +NULL NULL -17788 528419995 +NULL NULL -1787 -63842445 +NULL NULL -20125 -1995259010 +NULL NULL -20900 1078466156 +NULL NULL -22311 -2055239583 +NULL NULL -23457 -63842445 +NULL NULL -2407 1078466156 +NULL NULL -24206 -1456409156 +NULL NULL -24206 641361618 +NULL NULL -26894 -63842445 +NULL NULL -28129 -2055239583 +NULL NULL -28137 -63842445 +NULL NULL -28313 -706104224 +NULL NULL -28313 51228026 +NULL NULL -28313 837320573 +NULL NULL -4117 -1386947816 +NULL NULL -5734 1078466156 +NULL NULL -6061 -586336015 +NULL NULL -7386 -1635102480 +NULL NULL -7386 -2112062470 +NULL NULL -7386 100736776 +NULL NULL -980 -270600267 +NULL NULL -980 -333603940 +NULL NULL -980 -465544127 +NULL NULL -980 -801821285 +NULL NULL -980 1310479628 +NULL NULL -980 2009785365 +NULL NULL -980 356970043 +NULL NULL -980 628784462 +NULL NULL -980 712692345 +NULL NULL 11460 1078466156 +NULL NULL 12089 -63842445 +NULL NULL 13672 -63842445 +NULL NULL 14400 -825652334 +NULL NULL 15061 -63842445 +NULL NULL 15404 1078466156 +NULL NULL 16166 931172175 +NULL NULL 16696 -63842445 +NULL NULL 20156 -1618478138 +NULL NULL 20156 1165375499 +NULL NULL 20156 1855042153 +NULL NULL 20156 963883665 +NULL NULL 20969 -1995259010 +NULL NULL 21186 -586336015 +NULL NULL 22934 -1695419330 +NULL NULL 23015 -1893013623 +NULL NULL 23015 -217613200 +NULL NULL 23015 -252525791 +NULL NULL 23015 -276888585 +NULL NULL 23015 -696928205 +NULL NULL 23015 -893234501 +NULL NULL 23015 258882280 +NULL NULL 23015 564751472 +NULL NULL 26738 -2055239583 +NULL NULL 26944 -1995259010 +NULL NULL 30353 -1007182618 +NULL NULL 30353 -1011627089 +NULL NULL 30353 
-1507157031 +NULL NULL 30353 105613996 +NULL NULL 30353 1241923267 +NULL NULL 30353 1364268303 +NULL NULL 30353 2044473567 +NULL NULL 31443 -1968665833 +NULL NULL 3412 -1196037018 +NULL NULL 3412 -1249487623 +NULL NULL 3412 -2081156563 +NULL NULL 3412 -2132472060 +NULL NULL 3412 1253976194 +NULL NULL 3890 1411429004 +NULL NULL 4586 -586336015 +NULL NULL 4779 -1995259010 +NULL NULL 4902 1078466156 +NULL NULL 5957 -1995259010 +NULL NULL 8177 -1995259010 +NULL NULL NULL NULL +PREHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_big_1a_nonull +PREHOOK: Input: default@fullouter_multikey_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key0, b.key1, s.key0, s.key1 FROM fullouter_multikey_big_1a_nonull b FULL OUTER JOIN fullouter_multikey_small_1a_nonull s ON b.key0 = s.key0 AND b.key1 = s.key1 +order by b.key0, b.key1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_big_1a_nonull +POSTHOOK: Input: default@fullouter_multikey_small_1a_nonull +#### A masked pattern was here #### +-17582 -1730236061 NULL NULL +-17582 1082230084 NULL NULL +-17582 267529350 -17582 267529350 +-17582 827141667 NULL NULL +-17582 9637312 NULL NULL +-18222 -1969080993 NULL NULL +-6131 -1969080993 -6131 -1969080993 +1499 371855128 NULL NULL +22767 -1969080993 NULL NULL +3556 -1969080993 NULL NULL +NULL NULL -11868 -3536499 +NULL NULL -11868 -915441041 +NULL NULL -11868 1052120431 +NULL NULL -11868 1318114822 +NULL NULL -11868 1456809245 +NULL NULL -11868 1658440922 +NULL NULL -11868 930596435 +NULL NULL -11868 97203778 +NULL NULL -12252 1956403781 +NULL NULL -12252 964377504 +NULL NULL -15212 -2055239583 +NULL NULL -17788 -1361776766 +NULL NULL -17788 -738743861 +NULL NULL -17788 -872691214 +NULL NULL -17788 528419995 +NULL NULL -1787 -63842445 +NULL NULL -20125 -1995259010 +NULL NULL -20900 1078466156 +NULL NULL -22311 -2055239583 +NULL NULL -23457 -63842445 +NULL NULL -2407 1078466156 +NULL NULL -24206 -1456409156 +NULL NULL -24206 641361618 +NULL NULL -26894 -63842445 +NULL NULL -28129 -2055239583 +NULL NULL -28137 -63842445 +NULL NULL -28313 -706104224 +NULL NULL -28313 51228026 +NULL NULL -28313 837320573 +NULL NULL -4117 -1386947816 +NULL NULL -5734 1078466156 +NULL NULL -6061 -586336015 +NULL NULL -7386 -1635102480 +NULL NULL -7386 -2112062470 +NULL NULL -7386 100736776 +NULL NULL -980 -270600267 +NULL NULL -980 -333603940 +NULL NULL -980 -465544127 +NULL NULL -980 -801821285 +NULL NULL -980 1310479628 +NULL NULL -980 2009785365 +NULL NULL -980 356970043 +NULL NULL -980 628784462 +NULL NULL -980 712692345 +NULL NULL 11460 1078466156 +NULL NULL 12089 -63842445 +NULL NULL 13672 -63842445 +NULL NULL 14400 -825652334 +NULL NULL 15061 -63842445 +NULL NULL 15404 1078466156 +NULL NULL 16166 931172175 +NULL NULL 16696 -63842445 +NULL NULL 20156 -1618478138 +NULL NULL 20156 1165375499 +NULL NULL 20156 1855042153 +NULL NULL 20156 963883665 +NULL NULL 20969 -1995259010 +NULL NULL 21186 -586336015 +NULL NULL 22934 -1695419330 +NULL NULL 23015 -1893013623 +NULL NULL 23015 -217613200 +NULL NULL 23015 -252525791 +NULL NULL 23015 -276888585 +NULL NULL 23015 -696928205 +NULL NULL 23015 -893234501 +NULL NULL 23015 258882280 +NULL NULL 23015 564751472 +NULL NULL 26738 -2055239583 +NULL NULL 26944 -1995259010 +NULL NULL 30353 -1007182618 +NULL NULL 30353 -1011627089 
+NULL NULL 30353 -1507157031 +NULL NULL 30353 105613996 +NULL NULL 30353 1241923267 +NULL NULL 30353 1364268303 +NULL NULL 30353 2044473567 +NULL NULL 31443 -1968665833 +NULL NULL 3412 -1196037018 +NULL NULL 3412 -1249487623 +NULL NULL 3412 -2081156563 +NULL NULL 3412 -2132472060 +NULL NULL 3412 1253976194 +NULL NULL 3890 1411429004 +NULL NULL 4586 -586336015 +NULL NULL 4779 -1995259010 +NULL NULL 4902 1078466156 +NULL NULL 5957 -1995259010 +NULL NULL 8177 -1995259010 +PREHOOK: query: CREATE TABLE fullouter_multikey_big_1b_txt(key0 timestamp, key1 smallint, key2 string) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_big_1b_txt +POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1b_txt(key0 timestamp, key1 smallint, key2 string) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_big_1b_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1b_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_multikey_big_1b_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_big_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_big_1b_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_multikey_big_1b_txt +PREHOOK: query: CREATE TABLE fullouter_multikey_big_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1b_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_multikey_big_1b_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_big_1b +POSTHOOK: query: CREATE TABLE fullouter_multikey_big_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_big_1b_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_multikey_big_1b_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_big_1b +POSTHOOK: Lineage: fullouter_multikey_big_1b.key0 SIMPLE [(fullouter_multikey_big_1b_txt)fullouter_multikey_big_1b_txt.FieldSchema(name:key0, type:timestamp, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_big_1b.key1 SIMPLE [(fullouter_multikey_big_1b_txt)fullouter_multikey_big_1b_txt.FieldSchema(name:key1, type:smallint, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_big_1b.key2 SIMPLE [(fullouter_multikey_big_1b_txt)fullouter_multikey_big_1b_txt.FieldSchema(name:key2, type:string, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_multikey_small_1b_txt(key0 timestamp, key1 smallint, key2 string, s_decimal decimal(38, 18)) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_small_1b_txt +POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1b_txt(key0 timestamp, key1 smallint, key2 string, s_decimal decimal(38, 18)) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_small_1b_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_multikey_small_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1b_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_multikey_small_1b_txt +POSTHOOK: query: LOAD DATA LOCAL 
INPATH '../../data/files/fullouter_multikey_small_1b.txt' OVERWRITE INTO TABLE fullouter_multikey_small_1b_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_multikey_small_1b_txt +PREHOOK: query: CREATE TABLE fullouter_multikey_small_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1b_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_multikey_small_1b_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_multikey_small_1b +POSTHOOK: query: CREATE TABLE fullouter_multikey_small_1b STORED AS ORC AS SELECT * FROM fullouter_multikey_small_1b_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_multikey_small_1b_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_multikey_small_1b +POSTHOOK: Lineage: fullouter_multikey_small_1b.key0 SIMPLE [(fullouter_multikey_small_1b_txt)fullouter_multikey_small_1b_txt.FieldSchema(name:key0, type:timestamp, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_small_1b.key1 SIMPLE [(fullouter_multikey_small_1b_txt)fullouter_multikey_small_1b_txt.FieldSchema(name:key1, type:smallint, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_small_1b.key2 SIMPLE [(fullouter_multikey_small_1b_txt)fullouter_multikey_small_1b_txt.FieldSchema(name:key2, type:string, comment:null), ] +POSTHOOK: Lineage: fullouter_multikey_small_1b.s_decimal SIMPLE [(fullouter_multikey_small_1b_txt)fullouter_multikey_small_1b_txt.FieldSchema(name:s_decimal, type:decimal(38,18), comment:null), ] +PREHOOK: query: analyze table fullouter_multikey_big_1b_txt compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_big_1b_txt +PREHOOK: Output: default@fullouter_multikey_big_1b_txt +POSTHOOK: query: analyze table fullouter_multikey_big_1b_txt compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_big_1b_txt +POSTHOOK: Output: default@fullouter_multikey_big_1b_txt +PREHOOK: query: analyze table fullouter_multikey_big_1b_txt compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_multikey_big_1b_txt +PREHOOK: Output: default@fullouter_multikey_big_1b_txt +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_multikey_big_1b_txt compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_multikey_big_1b_txt +POSTHOOK: Output: default@fullouter_multikey_big_1b_txt +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_multikey_small_1b_txt compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_small_1b_txt +PREHOOK: Output: default@fullouter_multikey_small_1b_txt +POSTHOOK: query: analyze table fullouter_multikey_small_1b_txt compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_small_1b_txt +POSTHOOK: Output: default@fullouter_multikey_small_1b_txt +PREHOOK: query: analyze table fullouter_multikey_small_1b_txt compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_multikey_small_1b_txt +PREHOOK: Output: default@fullouter_multikey_small_1b_txt +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_multikey_small_1b_txt compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_multikey_small_1b_txt +POSTHOOK: Output: default@fullouter_multikey_small_1b_txt +#### A masked pattern was here #### 
+PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2 +order by b.key0, b.key1 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2 +order by b.key0, b.key1 +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 17 Data size: 1729 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key0:timestamp, 1:key1:smallint, 2:key2:string, 3:ROW__ID:struct] + Select Operator + expressions: key0 (type: timestamp), key1 (type: smallint), key2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2] + Statistics: Num rows: 17 Data size: 1729 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp), _col1 (type: smallint), _col2 (type: string) + sort order: +++ + Map-reduce partition columns: _col0 (type: timestamp), _col1 (type: smallint), _col2 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + keyColumns: 0:timestamp, 1:smallint, 2:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 17 Data size: 1729 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: key0:timestamp, key1:smallint, key2:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 2 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 118 Data size: 28216 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key0:timestamp, 1:key1:smallint, 2:key2:string, 3:s_decimal:decimal(38,18), 4:ROW__ID:struct] + Select Operator + expressions: key0 (type: timestamp), key1 (type: smallint), key2 (type: string), s_decimal (type: decimal(38,18)) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3] + Statistics: Num rows: 118 
Data size: 28216 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp), _col1 (type: smallint), _col2 (type: string) + sort order: +++ + Map-reduce partition columns: _col0 (type: timestamp), _col1 (type: smallint), _col2 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + keyColumns: 0:timestamp, 1:smallint, 2:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 3:decimal(38,18) + Statistics: Num rows: 118 Data size: 28216 Basic stats: COMPLETE Column stats: NONE + value expressions: _col3 (type: decimal(38,18)) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 4 + includeColumns: [0, 1, 2, 3] + dataColumns: key0:timestamp, key1:smallint, key2:string, s_decimal:decimal(38,18) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: aaa + reduceColumnSortOrder: +++ + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 4 + dataColumns: KEY.reducesinkkey0:timestamp, KEY.reducesinkkey1:smallint, KEY.reducesinkkey2:string, VALUE._col0:decimal(38,18) + partitionColumnCount: 0 + scratchColumnTypeNames: [timestamp, bigint, string] + Reduce Operator Tree: + Map Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: timestamp), KEY.reducesinkkey1 (type: smallint), KEY.reducesinkkey2 (type: string) + 1 KEY.reducesinkkey0 (type: timestamp), KEY.reducesinkkey1 (type: smallint), KEY.reducesinkkey2 (type: string) + Map Join Vectorization: + bigTableKeyExpressions: col 0:timestamp, col 1:smallint, col 2:string + bigTableValueExpressions: col 0:timestamp, col 1:smallint, col 2:string, col 3:decimal(38,18) + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + input vertices: + 0 Map 1 + Statistics: Num rows: 129 Data size: 31037 Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: timestamp), _col1 (type: smallint) + sort order: ++ + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + keyColumns: 0:timestamp, 1:smallint + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN 
IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 2:string, 3:timestamp, 4:smallint, 5:string, 6:decimal(38,18) + Statistics: Num rows: 129 Data size: 31037 Basic stats: COMPLETE Column stats: NONE + value expressions: _col2 (type: string), _col3 (type: timestamp), _col4 (type: smallint), _col5 (type: string), _col6 (type: decimal(38,18)) + Reducer 4 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: zz + reduceColumnSortOrder: ++ + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 7 + dataColumns: KEY.reducesinkkey0:timestamp, KEY.reducesinkkey1:smallint, VALUE._col0:string, VALUE._col1:timestamp, VALUE._col2:smallint, VALUE._col3:string, VALUE._col4:decimal(38,18) + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: timestamp), KEY.reducesinkkey1 (type: smallint), VALUE._col0 (type: string), VALUE._col1 (type: timestamp), VALUE._col2 (type: smallint), VALUE._col3 (type: string), VALUE._col4 (type: decimal(38,18)) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6] + Statistics: Num rows: 129 Data size: 31037 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 129 Data size: 31037 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2 +order by b.key0, b.key1 +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_multikey_big_1b +PREHOOK: Input: default@fullouter_multikey_small_1b +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key0, b.key1, b.key2, s.key0, s.key1, s.key2, s.s_decimal FROM fullouter_multikey_big_1b b FULL OUTER JOIN fullouter_multikey_small_1b s ON b.key0 = s.key0 AND b.key1 = s.key1 AND b.key2 = s.key2 +order by b.key0, b.key1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_multikey_big_1b +POSTHOOK: Input: default@fullouter_multikey_small_1b +#### A masked pattern was here #### +2061-12-19 22:10:32.000628309 21635 ANCO NULL NULL NULL NULL +2082-07-14 04:00:40.695380469 12556 NCYBDW NULL NULL NULL NULL +2093-04-10 23:36:54.846 1446 GHZVPWFO NULL NULL NULL NULL +2093-04-10 23:36:54.846 28996 Q NULL NULL NULL NULL +2093-04-10 23:36:54.846 NULL NULL NULL NULL NULL NULL +2188-06-04 15:03:14.963259704 9468 AAA 2188-06-04 15:03:14.963259704 9468 AAA 2.754963520000000000 +2299-11-15 16:41:30.401 -31077 NCYBDW NULL NULL NULL NULL +2306-06-21 11:02:00.143124239 -6909 NCYBDW NULL NULL NULL NULL +2306-06-21 11:02:00.143124239 1446 NULL NULL NULL NULL NULL +2608-02-23 23:44:02.546440891 26184 NCYBDW NULL NULL NULL 
NULL +2686-05-23 07:46:46.565832918 13212 NCYBDW 2686-05-23 07:46:46.565832918 13212 NCYBDW -917116793.400000000000000000 +2686-05-23 07:46:46.565832918 NULL GHZVPWFO NULL NULL NULL NULL +2898-10-01 22:27:02.000871113 10361 NCYBDW NULL NULL NULL NULL +NULL -6909 NULL NULL NULL NULL NULL +NULL 21635 ANCO NULL NULL NULL NULL +NULL NULL CCWYD NULL NULL NULL NULL +NULL NULL NULL 1905-04-20 13:42:25.000469776 2638 KAUUFF 7.000000000000000000 +NULL NULL NULL 1919-06-20 00:16:50.611028595 20223 ZKBC -23.000000000000000000 +NULL NULL NULL 1931-12-04 11:13:47.269597392 23196 HVJCQMTQL -9697532.899400000000000000 +NULL NULL NULL 1941-10-16 02:19:36.000423663 -24459 AO -821445414.457971200000000000 +NULL NULL NULL 1957-02-01 14:00:29.000548421 -16085 ZVEUKC -2312.814900000000000000 +NULL NULL NULL 1957-03-06 09:57:31 -26373 NXLNNSO 2.000000000000000000 +NULL NULL NULL 1980-09-13 19:57:15 NULL M 57650.772300000000000000 +NULL NULL NULL 2018-11-25 22:27:55.84 -12202 VBDBM 7506645.953700000000000000 +NULL NULL NULL 2018-11-25 22:27:55.84 -12202 VBDBM 98790.713907420831000000 +NULL NULL NULL 2018-11-25 22:27:55.84 -22419 LOTLS 342.372604022858400000 +NULL NULL NULL 2038-10-12 09:15:33.000539653 -19598 YKNIAJW -642807895924.660000000000000000 +NULL NULL NULL 2044-05-02 07:00:03.35 -8751 ZSMB -453797242.029791752000000000 +NULL NULL NULL 2071-07-21 20:02:32.000250697 2638 NRUV -66198.351092000000000000 +NULL NULL NULL 2073-03-21 15:32:57.617920888 26425 MPRACIRYW 5.000000000000000000 +NULL NULL NULL 2073-03-21 15:32:57.617920888 26425 MPRACIRYW 726945733.419300000000000000 +NULL NULL NULL 2075-10-25 20:32:40.000792874 NULL NULL 226612651968.360760000000000000 +NULL NULL NULL 2083-06-07 09:35:19.383 -26373 MR -394.086700000000000000 +NULL NULL NULL 2083-06-07 09:35:19.383 -26373 MR 67892053.023760940000000000 +NULL NULL NULL 2086-04-09 00:03:10 20223 THXNJGFFV -85184687349898.892000000000000000 +NULL NULL NULL 2086-04-09 00:03:10 20223 THXNJGFFV 0.439686100000000000 +NULL NULL NULL 2086-04-09 00:03:10 20223 THXNJGFFV 482.538341135921900000 +NULL NULL NULL 2105-01-04 16:27:45 23100 ZSMB -83.232800000000000000 +NULL NULL NULL 2145-10-15 06:58:42.831 2638 NULL -9784.820000000000000000 +NULL NULL NULL 2145-10-15 06:58:42.831 2638 UANGISEXR -5996.306000000000000000 +NULL NULL NULL 2169-04-02 06:30:32 23855 PDVQATOS -1515597428.000000000000000000 +NULL NULL NULL 2169-04-02 06:30:32 23855 PDVQATOS -4016.960800000000000000 +NULL NULL NULL 2201-07-05 17:22:06.084206844 -24459 UBGT 1.506948328200000000 +NULL NULL NULL 2238-05-17 19:27:25.519 20223 KQCM -0.010950000000000000 +NULL NULL NULL 2242-08-04 07:51:46.905 20223 UCYXACQ -0.261490000000000000 +NULL NULL NULL 2242-08-04 07:51:46.905 20223 UCYXACQ 37.728800000000000000 +NULL NULL NULL 2266-09-26 06:27:29.000284762 20223 EDYJJN 14.000000000000000000 +NULL NULL NULL 2301-06-03 17:16:19 15332 ZVEUKC 0.500000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 -13125 JFYW 6.086657000000000000 +NULL NULL NULL 2304-12-15 15:31:16 11101 YJCKKCR -0.200000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 11101 YJCKKCR -0.500000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 11101 YJCKKCR 1279917802.420000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 12587 OPW -4.594895040000000000 +NULL NULL NULL 2304-12-15 15:31:16 1301 T -0.800000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 1301 T 2720.800000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 1301 T 61.302000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 15090 G -4319470286240016.300000000000000000 +NULL NULL NULL 
2304-12-15 15:31:16 15090 G 975.000000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 30285 GSJPSIYOU 0.200000000000000000 +NULL NULL NULL 2304-12-15 15:31:16 8650 RLNO -0.435500000000000000 +NULL NULL NULL 2304-12-15 15:31:16 8650 RLNO 0.713517473350000000 +NULL NULL NULL 2309-01-15 12:43:49 22821 ZMY 40.900000000000000000 +NULL NULL NULL 2332-06-14 07:02:42.32 -26373 XFFFDTQ 56845106806308.900000000000000000 +NULL NULL NULL 2333-07-28 09:59:26 23196 RKSK 37872288434740893.500000000000000000 +NULL NULL NULL 2338-02-12 09:30:07 20223 CTH -6154.763054000000000000 +NULL NULL NULL 2340-12-15 05:15:17.133588982 23663 HHTP 33383.800000000000000000 +NULL NULL NULL 2355-09-23 19:52:34.638084141 -19598 H 74179461.880493000000000000 +NULL NULL NULL 2355-09-23 19:52:34.638084141 -19598 H 92.150000000000000000 +NULL NULL NULL 2357-05-08 07:09:09.000482799 6226 ZSMB -32.460000000000000000 +NULL NULL NULL 2357-05-08 07:09:09.000482799 6226 ZSMB -472.000000000000000000 +NULL NULL NULL 2391-01-17 15:28:37.00045143 16160 ZVEUKC 771355639420297.133000000000000000 +NULL NULL NULL 2396-04-06 15:39:02.404013577 29661 ZSMB -5151598.347000000000000000 +NULL NULL NULL 2396-04-06 15:39:02.404013577 29661 ZSMB 0.767183260000000000 +NULL NULL NULL 2409-09-23 10:33:27 2638 XSXR -162.950000000000000000 +NULL NULL NULL 2409-09-23 10:33:27 2638 XSXR -9926693851.000000000000000000 +NULL NULL NULL 2409-09-23 10:33:27 2638 XSXR 0.400000000000000000 +NULL NULL NULL 2410-05-03 13:44:56 2638 PHOR -769088.176482000000000000 +NULL NULL NULL 2410-05-03 13:44:56 2638 PHOR 93262.914526611000000000 +NULL NULL NULL 2461-03-09 09:54:45.000982385 -16454 ZSMB -9575827.553960000000000000 +NULL NULL NULL 2461-03-09 09:54:45.000982385 -16454 ZSMB -991.436050000000000000 +NULL NULL NULL 2461-03-09 09:54:45.000982385 -16454 ZSMB 8694.890000000000000000 +NULL NULL NULL 2462-12-16 23:11:32.633305644 -26373 CB -582687.000000000000000000 +NULL NULL NULL 2462-12-16 23:11:32.633305644 -26373 CB 67.417990000000000000 +NULL NULL NULL 2467-05-11 06:04:13.426693647 23196 EIBSDASR -8.554888380100000000 +NULL NULL NULL 2480-10-02 09:31:37.000770961 -26373 NBN -5875.519725200000000000 +NULL NULL NULL 2512-10-06 03:03:03 -3465 VZQ -49.512190000000000000 +NULL NULL NULL 2512-10-06 03:03:03 -3465 VZQ 0.445800000000000000 +NULL NULL NULL 2512-10-06 03:03:03 13195 CRJ 14.000000000000000000 +NULL NULL NULL 2512-10-06 03:03:03 1560 X -922.695158410700000000 +NULL NULL NULL 2512-10-06 03:03:03 1560 X 761196.522000000000000000 +NULL NULL NULL 2512-10-06 03:03:03 24313 QBHUG -8423.151573236000000000 +NULL NULL NULL 2512-10-06 03:03:03 32099 ARNZ -0.410000000000000000 +NULL NULL NULL 2525-05-12 15:59:35 -24459 SAVRGA 53106747151.863300000000000000 +NULL NULL NULL 2535-03-01 05:04:49.000525883 23663 ALIQKNXHE -0.166569100000000000 +NULL NULL NULL 2629-04-07 01:54:11 -6776 WGGFVFTW 41.774515077866460000 +NULL NULL NULL 2629-04-07 01:54:11 -6776 WGGFVFTW 6.801285170800000000 +NULL NULL NULL 2637-03-12 22:25:46.385 -12923 PPTJPFR 5.400000000000000000 +NULL NULL NULL 2637-03-12 22:25:46.385 -17786 HYEGQ -84.169614329419000000 +NULL NULL NULL 2637-03-12 22:25:46.385 21841 CXTI 7362887891522.378200000000000000 +NULL NULL NULL 2637-03-12 22:25:46.385 21841 CXTI 749563668434009.650000000000000000 +NULL NULL NULL 2668-06-25 07:12:37.000970744 2638 TJE -2.779682700000000000 +NULL NULL NULL 2688-02-06 20:58:42.000947837 20223 PAIY 67661.735000000000000000 +NULL NULL NULL 2743-12-27 05:16:19.000573579 -12914 ZVEUKC -811984611.517849700000000000 +NULL NULL NULL 2759-11-26 
22:19:55.410967136 -27454 ZMY 368.000000000000000000 +NULL NULL NULL 2759-11-26 22:19:55.410967136 -27454 ZMY 60.602579700000000000 +NULL NULL NULL 2808-07-09 02:10:11.928498854 -19598 FHFX 0.300000000000000000 +NULL NULL NULL 2829-06-04 08:01:47.836 22771 ZVEUKC 94317.753180000000000000 +NULL NULL NULL 2861-05-27 07:13:01.000848622 -19598 WKPXNLXS 29399.000000000000000000 +NULL NULL NULL 2882-05-20 07:21:25.221299462 23196 U -4244.926206619000000000 +NULL NULL NULL 2882-05-20 07:21:25.221299462 23196 U -9951044.000000000000000000 +NULL NULL NULL 2888-05-08 08:36:55.182302102 5786 ZVEUKC -56082455.033918000000000000 +NULL NULL NULL 2888-05-08 08:36:55.182302102 5786 ZVEUKC 57.621752577880370000 +NULL NULL NULL 2897-08-10 15:21:47.09 23663 XYUVBED 51.732330327300000000 +NULL NULL NULL 2897-08-10 15:21:47.09 23663 XYUVBED 6370.000000000000000000 +NULL NULL NULL 2898-12-18 03:37:17 -24459 MHNBXPBM 14.236693562384810000 +NULL NULL NULL 2913-07-17 15:06:58.041 -10206 NULL -0.200000000000000000 +NULL NULL NULL 2938-12-21 23:35:59.498 29362 ZMY 0.880000000000000000 +NULL NULL NULL 2957-05-07 10:41:46 20223 OWQT -586953.153681000000000000 +NULL NULL NULL 2960-04-12 07:03:42.000366651 20340 CYZYUNSF -96.300000000000000000 +NULL NULL NULL 2960-04-12 07:03:42.000366651 20340 CYZYUNSF 2.157765900000000000 +NULL NULL NULL 2969-01-23 14:08:04.000667259 -18138 VDPN 8924831210.427680190000000000 +NULL NULL NULL 2969-01-23 14:08:04.000667259 -32485 AGEPWWLJF -48431309405.652522000000000000 +NULL NULL NULL 2969-01-23 14:08:04.000667259 -8913 UIMQ -375994644577.315257000000000000 +NULL NULL NULL 2969-01-23 14:08:04.000667259 -8913 UIMQ -81.000000000000000000 +NULL NULL NULL 2969-01-23 14:08:04.000667259 -8913 UIMQ 9.178000000000000000 +NULL NULL NULL 2969-01-23 14:08:04.000667259 14500 WXLTRFQP -23.819800000000000000 +NULL NULL NULL 2969-01-23 14:08:04.000667259 6689 TFGVOGPJF -0.010000000000000000 +NULL NULL NULL 2971-02-14 09:13:19 -16605 BVACIRP -27394351.300000000000000000 +NULL NULL NULL 2971-02-14 09:13:19 -16605 BVACIRP -5.751278023000000000 +NULL NULL NULL NULL -12914 ZVEUKC 221.000000000000000000 +NULL NULL NULL NULL NULL NULL -2.400000000000000000 +NULL NULL NULL NULL NULL NULL -2207.300000000000000000 +NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: CREATE TABLE fullouter_string_big_1a_txt(key string) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_string_big_1a_txt +POSTHOOK: query: CREATE TABLE fullouter_string_big_1a_txt(key string) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_string_big_1a_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_string_big_1a_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_string_big_1a_txt +PREHOOK: query: CREATE TABLE fullouter_string_big_1a STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_string_big_1a_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_string_big_1a +POSTHOOK: query: CREATE 
TABLE fullouter_string_big_1a STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_string_big_1a_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_string_big_1a +POSTHOOK: Lineage: fullouter_string_big_1a.key SIMPLE [(fullouter_string_big_1a_txt)fullouter_string_big_1a_txt.FieldSchema(name:key, type:string, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_string_big_1a_nonull_txt(key string) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_string_big_1a_nonull_txt +POSTHOOK: query: CREATE TABLE fullouter_string_big_1a_nonull_txt(key string) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_string_big_1a_nonull_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_nonull_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_string_big_1a_nonull_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_big_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_big_1a_nonull_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_string_big_1a_nonull_txt +PREHOOK: query: CREATE TABLE fullouter_string_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_nonull_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_string_big_1a_nonull_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_string_big_1a_nonull +POSTHOOK: query: CREATE TABLE fullouter_string_big_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_big_1a_nonull_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_string_big_1a_nonull_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_string_big_1a_nonull +POSTHOOK: Lineage: fullouter_string_big_1a_nonull.key SIMPLE [(fullouter_string_big_1a_nonull_txt)fullouter_string_big_1a_nonull_txt.FieldSchema(name:key, type:string, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_string_small_1a_txt(key string, s_date date, s_timestamp timestamp) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_string_small_1a_txt +POSTHOOK: query: CREATE TABLE fullouter_string_small_1a_txt(key string, s_date date, s_timestamp timestamp) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_string_small_1a_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_string_small_1a_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_string_small_1a_txt +PREHOOK: query: CREATE TABLE fullouter_string_small_1a STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: 
default@fullouter_string_small_1a_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_string_small_1a +POSTHOOK: query: CREATE TABLE fullouter_string_small_1a STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_string_small_1a_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_string_small_1a +POSTHOOK: Lineage: fullouter_string_small_1a.key SIMPLE [(fullouter_string_small_1a_txt)fullouter_string_small_1a_txt.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: fullouter_string_small_1a.s_date SIMPLE [(fullouter_string_small_1a_txt)fullouter_string_small_1a_txt.FieldSchema(name:s_date, type:date, comment:null), ] +POSTHOOK: Lineage: fullouter_string_small_1a.s_timestamp SIMPLE [(fullouter_string_small_1a_txt)fullouter_string_small_1a_txt.FieldSchema(name:s_timestamp, type:timestamp, comment:null), ] +PREHOOK: query: CREATE TABLE fullouter_string_small_1a_nonull_txt(key string, s_date date, s_timestamp timestamp) +row format delimited fields terminated by ',' +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_string_small_1a_nonull_txt +POSTHOOK: query: CREATE TABLE fullouter_string_small_1a_nonull_txt(key string, s_date date, s_timestamp timestamp) +row format delimited fields terminated by ',' +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_string_small_1a_nonull_txt +PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_nonull_txt +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@fullouter_string_small_1a_nonull_txt +POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/fullouter_string_small_1a_nonull.txt' OVERWRITE INTO TABLE fullouter_string_small_1a_nonull_txt +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@fullouter_string_small_1a_nonull_txt +PREHOOK: query: CREATE TABLE fullouter_string_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_nonull_txt +PREHOOK: type: CREATETABLE_AS_SELECT +PREHOOK: Input: default@fullouter_string_small_1a_nonull_txt +PREHOOK: Output: database:default +PREHOOK: Output: default@fullouter_string_small_1a_nonull +POSTHOOK: query: CREATE TABLE fullouter_string_small_1a_nonull STORED AS ORC AS SELECT * FROM fullouter_string_small_1a_nonull_txt +POSTHOOK: type: CREATETABLE_AS_SELECT +POSTHOOK: Input: default@fullouter_string_small_1a_nonull_txt +POSTHOOK: Output: database:default +POSTHOOK: Output: default@fullouter_string_small_1a_nonull +POSTHOOK: Lineage: fullouter_string_small_1a_nonull.key SIMPLE [(fullouter_string_small_1a_nonull_txt)fullouter_string_small_1a_nonull_txt.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: fullouter_string_small_1a_nonull.s_date SIMPLE [(fullouter_string_small_1a_nonull_txt)fullouter_string_small_1a_nonull_txt.FieldSchema(name:s_date, type:date, comment:null), ] +POSTHOOK: Lineage: fullouter_string_small_1a_nonull.s_timestamp SIMPLE [(fullouter_string_small_1a_nonull_txt)fullouter_string_small_1a_nonull_txt.FieldSchema(name:s_timestamp, type:timestamp, comment:null), ] +PREHOOK: query: analyze table fullouter_string_big_1a compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_string_big_1a +PREHOOK: Output: default@fullouter_string_big_1a 
+POSTHOOK: query: analyze table fullouter_string_big_1a compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_string_big_1a +POSTHOOK: Output: default@fullouter_string_big_1a +PREHOOK: query: analyze table fullouter_string_big_1a compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_string_big_1a +PREHOOK: Output: default@fullouter_string_big_1a +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_string_big_1a compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_string_big_1a +POSTHOOK: Output: default@fullouter_string_big_1a +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_string_big_1a_nonull compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_string_big_1a_nonull +PREHOOK: Output: default@fullouter_string_big_1a_nonull +POSTHOOK: query: analyze table fullouter_string_big_1a_nonull compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_string_big_1a_nonull +POSTHOOK: Output: default@fullouter_string_big_1a_nonull +PREHOOK: query: analyze table fullouter_string_big_1a_nonull compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_string_big_1a_nonull +PREHOOK: Output: default@fullouter_string_big_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_string_big_1a_nonull compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_string_big_1a_nonull +POSTHOOK: Output: default@fullouter_string_big_1a_nonull +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_string_small_1a compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_string_small_1a +PREHOOK: Output: default@fullouter_string_small_1a +POSTHOOK: query: analyze table fullouter_string_small_1a compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_string_small_1a +POSTHOOK: Output: default@fullouter_string_small_1a +PREHOOK: query: analyze table fullouter_string_small_1a compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_string_small_1a +PREHOOK: Output: default@fullouter_string_small_1a +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_string_small_1a compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_string_small_1a +POSTHOOK: Output: default@fullouter_string_small_1a +#### A masked pattern was here #### +PREHOOK: query: analyze table fullouter_string_small_1a_nonull compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_string_small_1a_nonull +PREHOOK: Output: default@fullouter_string_small_1a_nonull +POSTHOOK: query: analyze table fullouter_string_small_1a_nonull compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_string_small_1a_nonull +POSTHOOK: Output: default@fullouter_string_small_1a_nonull +PREHOOK: query: analyze table fullouter_string_small_1a_nonull compute statistics for columns +PREHOOK: type: ANALYZE_TABLE +PREHOOK: Input: default@fullouter_string_small_1a_nonull +PREHOOK: Output: default@fullouter_string_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: analyze table fullouter_string_small_1a_nonull compute statistics for columns +POSTHOOK: type: ANALYZE_TABLE +POSTHOOK: Input: default@fullouter_string_small_1a_nonull +POSTHOOK: Output: 
default@fullouter_string_small_1a_nonull +#### A masked pattern was here #### +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 3 <- Map 1 (CUSTOM_SIMPLE_EDGE), Map 2 (CUSTOM_SIMPLE_EDGE) + Reducer 4 <- Reducer 3 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 13 Data size: 1056 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:ROW__ID:struct] + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 13 Data size: 1056 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 13 Data size: 1056 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + includeColumns: [0] + dataColumns: key:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 2 + Map Operator Tree: + TableScan + alias: s + Statistics: Num rows: 38 Data size: 6606 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:s_date:date, 2:s_timestamp:timestamp, 3:ROW__ID:struct] + Select Operator + expressions: key (type: string), s_date (type: date), s_timestamp (type: timestamp) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2] + Statistics: Num rows: 38 Data size: 6606 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN 
[tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:date, 2:timestamp + Statistics: Num rows: 38 Data size: 6606 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: date), _col2 (type: timestamp) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: key:string, s_date:date, s_timestamp:timestamp + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: a + reduceColumnSortOrder: + + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 3 + dataColumns: KEY.reducesinkkey0:string, VALUE._col0:date, VALUE._col1:timestamp + partitionColumnCount: 0 + scratchColumnTypeNames: [string] + Reduce Operator Tree: + Map Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 KEY.reducesinkkey0 (type: string) + 1 KEY.reducesinkkey0 (type: string) + Map Join Vectorization: + bigTableKeyExpressions: col 0:string + bigTableValueExpressions: col 0:string, col 1:date, col 2:timestamp + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false + outputColumnNames: _col0, _col1, _col2, _col3 + input vertices: + 0 Map 1 + Statistics: Num rows: 41 Data size: 7266 Basic stats: COMPLETE Column stats: NONE + DynamicPartitionHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:string, 2:date, 3:timestamp + Statistics: Num rows: 41 Data size: 7266 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string), _col2 (type: date), _col3 (type: timestamp) + Reducer 4 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: z + reduceColumnSortOrder: + + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 4 + dataColumns: KEY.reducesinkkey0:string, VALUE._col0:string, VALUE._col1:date, VALUE._col2:timestamp + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Select 
Operator + expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string), VALUE._col1 (type: date), VALUE._col2 (type: timestamp) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3] + Statistics: Num rows: 41 Data size: 7266 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 41 Data size: 7266 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_string_big_1a +PREHOOK: Input: default@fullouter_string_small_1a +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_string_big_1a +POSTHOOK: Input: default@fullouter_string_small_1a +#### A masked pattern was here #### +FTWURVH FTWURVH 1976-03-10 2683-11-22 13:07:04.66673556 +MXGDMBD MXGDMBD 1880-11-01 2765-10-06 13:28:17.000688592 +NULL 1985-01-22 2111-01-10 15:44:28 +NULL 2021-02-21 2802-04-21 18:48:18.5933838 +NULL AARNZRVZQ 2000-11-13 2309-06-05 19:54:13 +NULL AARNZRVZQ 2002-10-23 2525-05-12 15:59:35 +NULL ATZJTPECF 1829-10-16 2357-05-08 07:09:09.000482799 +NULL ATZJTPECF 2217-10-22 2808-10-20 16:01:24.558 +NULL BDBMW 2278-04-27 2101-02-21 08:53:34.692 +NULL BEP 2141-02-19 2521-06-09 01:20:07.121 +NULL BEP 2206-08-10 2331-10-09 10:59:51 +NULL CQMTQLI 2031-09-13 1927-02-13 08:39:25.000919094 +NULL CQMTQLI 2090-11-13 2693-03-17 16:19:55.82 +NULL FROPIK 2023-02-28 2467-05-11 06:04:13.426693647 +NULL FROPIK 2124-10-01 2974-07-06 12:05:08.000146048 +NULL FROPIK 2214-02-09 1949-08-18 17:14:38.000703738 +NULL FYW 1807-03-20 2305-08-17 01:32:44 +NULL GOYJHW 1959-04-27 NULL +NULL GOYJHW 1976-03-06 2805-07-10 10:51:57.00083302 +NULL GOYJHW 1993-04-07 1950-05-04 09:28:22.000114784 +NULL GSJPSIYOU 1948-07-17 2006-09-24 16:01:24.000239251 +NULL IOQIDQBHU 2198-02-08 2073-03-21 15:32:57.617920888 +NULL IWEZJHKE NULL NULL +NULL KL 1980-09-22 2073-08-25 11:51:10.318 +NULL LOTLS 1957-11-09 2092-06-07 06:42:30.000538454 +NULL LOTLS 2099-08-04 2181-01-25 01:04:25.000030055 +NULL LOTLS 2126-09-16 1977-12-15 15:28:56 +NULL NADANUQMW 2037-10-19 2320-04-26 18:50:25.000426922 +NULL NULL 1865-11-08 2893-04-07 07:36:12 +NULL NULL 1915-02-22 2554-10-27 09:34:30 +NULL NULL 2250-04-22 2548-03-21 08:23:13.133573801 +NULL NULL NULL NULL +NULL QTSRKSKB 2144-01-13 2627-12-20 03:38:53.000389266 +NULL SDA 2196-04-12 2462-10-26 19:28:12.733 +NULL VNRXWQ 1883-02-06 2287-07-17 16:46:58.287 +NULL VNRXWQ 2276-11-16 2072-08-16 17:45:47.48349887 +NULL WNGFTTY 1843-06-10 2411-01-28 20:03:59 +NULL WNGFTTY 2251-08-16 2649-12-21 18:30:42.498 +NULL ZNOUDCR NULL 1988-04-23 08:40:21 +PXLD NULL NULL NULL +PXLD NULL NULL NULL +PXLD NULL NULL NULL +QNCYBDW NULL NULL NULL +UA NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ 
NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +PREHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_string_big_1a_nonull +PREHOOK: Input: default@fullouter_string_small_1a +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_string_big_1a_nonull +POSTHOOK: Input: default@fullouter_string_small_1a +#### A masked pattern was here #### +FTWURVH FTWURVH 1976-03-10 2683-11-22 13:07:04.66673556 +MXGDMBD MXGDMBD 1880-11-01 2765-10-06 13:28:17.000688592 +NULL 1985-01-22 2111-01-10 15:44:28 +NULL 2021-02-21 2802-04-21 18:48:18.5933838 +NULL AARNZRVZQ 2000-11-13 2309-06-05 19:54:13 +NULL AARNZRVZQ 2002-10-23 2525-05-12 15:59:35 +NULL ATZJTPECF 1829-10-16 2357-05-08 07:09:09.000482799 +NULL ATZJTPECF 2217-10-22 2808-10-20 16:01:24.558 +NULL BDBMW 2278-04-27 2101-02-21 08:53:34.692 +NULL BEP 2141-02-19 2521-06-09 01:20:07.121 +NULL BEP 2206-08-10 2331-10-09 10:59:51 +NULL CQMTQLI 2031-09-13 1927-02-13 08:39:25.000919094 +NULL CQMTQLI 2090-11-13 2693-03-17 16:19:55.82 +NULL FROPIK 2023-02-28 2467-05-11 06:04:13.426693647 +NULL FROPIK 2124-10-01 2974-07-06 12:05:08.000146048 +NULL FROPIK 2214-02-09 1949-08-18 17:14:38.000703738 +NULL FYW 1807-03-20 2305-08-17 01:32:44 +NULL GOYJHW 1959-04-27 NULL +NULL GOYJHW 1976-03-06 2805-07-10 10:51:57.00083302 +NULL GOYJHW 1993-04-07 1950-05-04 09:28:22.000114784 +NULL GSJPSIYOU 1948-07-17 2006-09-24 16:01:24.000239251 +NULL IOQIDQBHU 2198-02-08 2073-03-21 15:32:57.617920888 +NULL IWEZJHKE NULL NULL +NULL KL 1980-09-22 2073-08-25 11:51:10.318 +NULL LOTLS 1957-11-09 2092-06-07 06:42:30.000538454 +NULL LOTLS 2099-08-04 2181-01-25 01:04:25.000030055 +NULL LOTLS 2126-09-16 1977-12-15 15:28:56 +NULL NADANUQMW 2037-10-19 2320-04-26 18:50:25.000426922 +NULL NULL 1865-11-08 2893-04-07 07:36:12 +NULL NULL 1915-02-22 2554-10-27 09:34:30 +NULL NULL 2250-04-22 2548-03-21 08:23:13.133573801 +NULL QTSRKSKB 2144-01-13 2627-12-20 03:38:53.000389266 +NULL SDA 2196-04-12 2462-10-26 19:28:12.733 +NULL VNRXWQ 1883-02-06 2287-07-17 16:46:58.287 +NULL VNRXWQ 2276-11-16 2072-08-16 17:45:47.48349887 +NULL WNGFTTY 1843-06-10 2411-01-28 20:03:59 +NULL WNGFTTY 2251-08-16 2649-12-21 18:30:42.498 +NULL ZNOUDCR NULL 1988-04-23 08:40:21 +PXLD NULL NULL NULL +PXLD NULL NULL NULL +PXLD NULL NULL NULL +QNCYBDW NULL NULL NULL +UA NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +PREHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_string_big_1a +PREHOOK: Input: default@fullouter_string_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_string_big_1a +POSTHOOK: Input: default@fullouter_string_small_1a_nonull +#### A masked pattern was here #### +FTWURVH FTWURVH 1976-03-10 2683-11-22 13:07:04.66673556 +MXGDMBD 
MXGDMBD 1880-11-01 2765-10-06 13:28:17.000688592 +NULL 1985-01-22 2111-01-10 15:44:28 +NULL 2021-02-21 2802-04-21 18:48:18.5933838 +NULL AARNZRVZQ 2000-11-13 2309-06-05 19:54:13 +NULL AARNZRVZQ 2002-10-23 2525-05-12 15:59:35 +NULL ATZJTPECF 1829-10-16 2357-05-08 07:09:09.000482799 +NULL ATZJTPECF 2217-10-22 2808-10-20 16:01:24.558 +NULL BDBMW 2278-04-27 2101-02-21 08:53:34.692 +NULL BEP 2141-02-19 2521-06-09 01:20:07.121 +NULL BEP 2206-08-10 2331-10-09 10:59:51 +NULL CQMTQLI 2031-09-13 1927-02-13 08:39:25.000919094 +NULL CQMTQLI 2090-11-13 2693-03-17 16:19:55.82 +NULL FROPIK 2023-02-28 2467-05-11 06:04:13.426693647 +NULL FROPIK 2124-10-01 2974-07-06 12:05:08.000146048 +NULL FROPIK 2214-02-09 1949-08-18 17:14:38.000703738 +NULL FYW 1807-03-20 2305-08-17 01:32:44 +NULL GOYJHW 1959-04-27 NULL +NULL GOYJHW 1976-03-06 2805-07-10 10:51:57.00083302 +NULL GOYJHW 1993-04-07 1950-05-04 09:28:22.000114784 +NULL GSJPSIYOU 1948-07-17 2006-09-24 16:01:24.000239251 +NULL IOQIDQBHU 2198-02-08 2073-03-21 15:32:57.617920888 +NULL IWEZJHKE NULL NULL +NULL KL 1980-09-22 2073-08-25 11:51:10.318 +NULL LOTLS 1957-11-09 2092-06-07 06:42:30.000538454 +NULL LOTLS 2099-08-04 2181-01-25 01:04:25.000030055 +NULL LOTLS 2126-09-16 1977-12-15 15:28:56 +NULL NADANUQMW 2037-10-19 2320-04-26 18:50:25.000426922 +NULL NULL NULL NULL +NULL QTSRKSKB 2144-01-13 2627-12-20 03:38:53.000389266 +NULL SDA 2196-04-12 2462-10-26 19:28:12.733 +NULL VNRXWQ 1883-02-06 2287-07-17 16:46:58.287 +NULL VNRXWQ 2276-11-16 2072-08-16 17:45:47.48349887 +NULL WNGFTTY 1843-06-10 2411-01-28 20:03:59 +NULL WNGFTTY 2251-08-16 2649-12-21 18:30:42.498 +NULL ZNOUDCR NULL 1988-04-23 08:40:21 +PXLD NULL NULL NULL +PXLD NULL NULL NULL +PXLD NULL NULL NULL +QNCYBDW NULL NULL NULL +UA NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +PREHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key +order by b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@fullouter_string_big_1a_nonull +PREHOOK: Input: default@fullouter_string_small_1a_nonull +#### A masked pattern was here #### +POSTHOOK: query: SELECT b.key, s.key, s.s_date, s.s_timestamp FROM fullouter_string_big_1a_nonull b FULL OUTER JOIN fullouter_string_small_1a_nonull s ON b.key = s.key +order by b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@fullouter_string_big_1a_nonull +POSTHOOK: Input: default@fullouter_string_small_1a_nonull +#### A masked pattern was here #### +FTWURVH FTWURVH 1976-03-10 2683-11-22 13:07:04.66673556 +MXGDMBD MXGDMBD 1880-11-01 2765-10-06 13:28:17.000688592 +NULL 1985-01-22 2111-01-10 15:44:28 +NULL 2021-02-21 2802-04-21 18:48:18.5933838 +NULL AARNZRVZQ 2000-11-13 2309-06-05 19:54:13 +NULL AARNZRVZQ 2002-10-23 2525-05-12 15:59:35 +NULL ATZJTPECF 1829-10-16 2357-05-08 07:09:09.000482799 +NULL ATZJTPECF 2217-10-22 2808-10-20 16:01:24.558 +NULL BDBMW 2278-04-27 2101-02-21 08:53:34.692 +NULL BEP 2141-02-19 2521-06-09 01:20:07.121 +NULL BEP 2206-08-10 2331-10-09 10:59:51 +NULL CQMTQLI 2031-09-13 1927-02-13 08:39:25.000919094 +NULL CQMTQLI 2090-11-13 2693-03-17 16:19:55.82 +NULL FROPIK 2023-02-28 2467-05-11 06:04:13.426693647 +NULL FROPIK 2124-10-01 2974-07-06 12:05:08.000146048 +NULL FROPIK 2214-02-09 1949-08-18 17:14:38.000703738 +NULL FYW 1807-03-20 2305-08-17 01:32:44 +NULL GOYJHW 1959-04-27 NULL +NULL GOYJHW 1976-03-06 2805-07-10 10:51:57.00083302 +NULL GOYJHW 1993-04-07 1950-05-04 
09:28:22.000114784 +NULL GSJPSIYOU 1948-07-17 2006-09-24 16:01:24.000239251 +NULL IOQIDQBHU 2198-02-08 2073-03-21 15:32:57.617920888 +NULL IWEZJHKE NULL NULL +NULL KL 1980-09-22 2073-08-25 11:51:10.318 +NULL LOTLS 1957-11-09 2092-06-07 06:42:30.000538454 +NULL LOTLS 2099-08-04 2181-01-25 01:04:25.000030055 +NULL LOTLS 2126-09-16 1977-12-15 15:28:56 +NULL NADANUQMW 2037-10-19 2320-04-26 18:50:25.000426922 +NULL QTSRKSKB 2144-01-13 2627-12-20 03:38:53.000389266 +NULL SDA 2196-04-12 2462-10-26 19:28:12.733 +NULL VNRXWQ 1883-02-06 2287-07-17 16:46:58.287 +NULL VNRXWQ 2276-11-16 2072-08-16 17:45:47.48349887 +NULL WNGFTTY 1843-06-10 2411-01-28 20:03:59 +NULL WNGFTTY 2251-08-16 2649-12-21 18:30:42.498 +NULL ZNOUDCR NULL 1988-04-23 08:40:21 +PXLD NULL NULL NULL +PXLD NULL NULL NULL +PXLD NULL NULL NULL +QNCYBDW NULL NULL NULL +UA NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL +WXHJ NULL NULL NULL diff --git ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out index 5c0d6bbb73..6eaf7ade24 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_cube1.q.out @@ -72,10 +72,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 4 Data size: 1472 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap @@ -209,10 +209,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 4 Data size: 1472 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap @@ -372,10 +372,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 4 Data size: 1472 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap @@ -632,11 +632,11 @@ STAGE PLANS: Map-reduce partition 
columns: rand() (type: double) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [4] - valueColumnNums: [3] + partitionColumns: 4:double + valueColumns: 3:bigint Statistics: Num rows: 4 Data size: 1472 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap @@ -692,11 +692,11 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 1] - valueColumnNums: [3] + partitionColumns: 0:string, 1:string + valueColumns: 3:bigint Statistics: Num rows: 4 Data size: 1472 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Reducer 3 @@ -986,11 +986,11 @@ STAGE PLANS: Map-reduce partition columns: rand() (type: double) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [4] - valueColumnNums: [3] + partitionColumns: 4:double + valueColumns: 3:bigint Statistics: Num rows: 4 Data size: 1472 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Select Operator @@ -1021,11 +1021,11 @@ STAGE PLANS: Map-reduce partition columns: rand() (type: double) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [4] - valueColumnNums: [3] + partitionColumns: 4:double + valueColumns: 3:bigint Statistics: Num rows: 4 Data size: 1472 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap @@ -1081,11 +1081,11 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 1] - valueColumnNums: [3] + partitionColumns: 
0:string, 1:string + valueColumns: 3:bigint Statistics: Num rows: 4 Data size: 1472 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Reducer 3 @@ -1184,11 +1184,11 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 1] - valueColumnNums: [3] + partitionColumns: 0:string, 1:string + valueColumns: 3:bigint Statistics: Num rows: 4 Data size: 1472 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Reducer 6 diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out index 1ffa0fdd80..f018a61b99 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id1.q.out @@ -84,10 +84,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 24 Data size: 8832 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -243,10 +242,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 24 Data size: 8832 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -402,10 +400,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 18 Data size: 6624 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -555,10 +552,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: 
VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 18 Data size: 6624 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -708,10 +704,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 24 Data size: 8832 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -868,10 +863,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 24 Data size: 8832 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out index dce2930943..ff300a06c5 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out @@ -87,11 +87,11 @@ STAGE PLANS: Map-reduce partition columns: rand() (type: double) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [4] - valueColumnNums: [3] + partitionColumns: 4:double + valueColumns: 3:bigint Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap @@ -147,11 +147,11 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 1] - 
valueColumnNums: [3] + partitionColumns: 0:int, 1:int + valueColumns: 3:bigint Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Reducer 3 @@ -289,11 +289,11 @@ STAGE PLANS: Map-reduce partition columns: rand() (type: double) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [4] - valueColumnNums: [3] + partitionColumns: 4:double + valueColumns: 3:bigint Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap @@ -349,11 +349,11 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 1] - valueColumnNums: [3] + partitionColumns: 0:int, 1:int + valueColumns: 3:bigint Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Reducer 3 @@ -501,11 +501,10 @@ STAGE PLANS: Map-reduce partition columns: rand() (type: double) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [3] - valueColumnNums: [] + partitionColumns: 3:double Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -558,11 +557,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 1] - valueColumnNums: [] + partitionColumns: 0:int, 1:int Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Reducer 3 Execution mode: vectorized, llap @@ -620,11 +618,11 @@ STAGE PLANS: Map-reduce partition columns: rand() (type: double) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS 
true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [1] + partitionColumns: 2:double + valueColumns: 1:bigint Statistics: Num rows: 9 Data size: 72 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reducer 4 @@ -663,10 +661,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:bigint Statistics: Num rows: 9 Data size: 72 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reducer 5 @@ -808,11 +806,10 @@ STAGE PLANS: Map-reduce partition columns: rand() (type: double) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [3] - valueColumnNums: [] + partitionColumns: 3:double Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -865,11 +862,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 1] - valueColumnNums: [] + partitionColumns: 0:int, 1:int Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Reducer 3 Execution mode: vectorized, llap @@ -927,11 +923,11 @@ STAGE PLANS: Map-reduce partition columns: rand() (type: double) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [1] + partitionColumns: 2:double + valueColumns: 1:bigint Statistics: Num rows: 9 Data size: 72 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reducer 4 @@ -970,10 +966,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:bigint 
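The hunks around this point replace the bare index arrays (keyColumnNums, valueColumnNums, partitionColumnNums) with typed keyColumns/valueColumns/partitionColumns annotations, so each plan line now reads as columnNum:type pairs and is self-describing. A hedged sketch of the kind of query these vector_groupby_grouping_id2 plans describe — the .q test text itself is not part of this diff, so the table and column names here are assumptions:

EXPLAIN VECTORIZATION DETAIL
SELECT key1, key2, GROUPING__ID, count(*)  -- GROUPING__ID is the bigint seen in "keyColumns: 0:int, 1:int, 2:bigint"
FROM t1_n1                                 -- hypothetical table with two int grouping columns
GROUP BY key1, key2 WITH CUBE;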
Statistics: Num rows: 9 Data size: 72 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reducer 5 @@ -1111,11 +1107,10 @@ STAGE PLANS: Map-reduce partition columns: rand() (type: double) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [3] - valueColumnNums: [] + partitionColumns: 3:double Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -1168,11 +1163,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 1] - valueColumnNums: [] + partitionColumns: 0:int, 1:int Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: int), _col2 (type: bigint) @@ -1180,11 +1174,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 1] - valueColumnNums: [] + partitionColumns: 0:int, 1:int Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Reducer 3 Execution mode: vectorized, llap @@ -1228,10 +1221,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [2] + keyColumns: 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 9 Data size: 72 Basic stats: COMPLETE Column stats: NONE Reducer 4 Execution mode: llap @@ -1251,6 +1243,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 5 Execution mode: vectorized, llap Reduce Vectorization: @@ -1293,10 +1288,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [2] + keyColumns: 2:bigint native: true nativeConditionsMet: 
hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 9 Data size: 72 Basic stats: COMPLETE Column stats: NONE Stage: Stage-0 @@ -1439,11 +1433,10 @@ STAGE PLANS: Map-reduce partition columns: rand() (type: double) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [3] - valueColumnNums: [] + partitionColumns: 3:double Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -1496,11 +1489,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 1] - valueColumnNums: [] + partitionColumns: 0:int, 1:int Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: int), _col2 (type: bigint) @@ -1508,11 +1500,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 1] - valueColumnNums: [] + partitionColumns: 0:int, 1:int Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Reducer 3 Execution mode: vectorized, llap @@ -1556,10 +1547,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [2] + keyColumns: 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 9 Data size: 72 Basic stats: COMPLETE Column stats: NONE Reducer 4 Execution mode: llap @@ -1579,6 +1569,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 5 Execution mode: vectorized, llap Reduce Vectorization: @@ -1621,10 
+1614,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [2] + keyColumns: 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 9 Data size: 72 Basic stats: COMPLETE Column stats: NONE Stage: Stage-0 @@ -1760,10 +1752,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap @@ -1926,10 +1918,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -2004,10 +1995,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:bigint Statistics: Num rows: 9 Data size: 72 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reducer 3 @@ -2144,10 +2135,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: int), _col2 (type: bigint) @@ -2155,10 +2145,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - 
keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -2219,10 +2208,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [2] + keyColumns: 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 9 Data size: 72 Basic stats: COMPLETE Column stats: NONE Reducer 3 Execution mode: llap @@ -2242,6 +2230,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 4 Execution mode: vectorized, llap Reduce Vectorization: @@ -2284,10 +2275,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [2] + keyColumns: 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 9 Data size: 72 Basic stats: COMPLETE Column stats: NONE Stage: Stage-0 diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out index 02f4683c66..c090051ce3 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id3.q.out @@ -101,11 +101,11 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int), 1L (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 4] + keyColumns: 0:int, 1:int, 4:bigint keyExpressions: ConstantVectorExpression(val 1) -> 4:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 6 Data size: 48 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap @@ -270,10 +270,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, 
hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 12 Data size: 96 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out index 1229c6d1a1..eb5480d5ab 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets1.q.out @@ -102,10 +102,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 24 Data size: 8832 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap @@ -264,10 +264,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 24 Data size: 8832 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap @@ -426,10 +426,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 24 Data size: 8832 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap @@ -588,10 +588,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num 
rows: 12 Data size: 4416 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap @@ -744,10 +744,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2, 3] + keyColumns: 0:string, 1:string, 2:string, 3:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 18 Data size: 9936 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -900,10 +899,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6 Data size: 1104 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -1041,10 +1039,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: double) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:bigint Statistics: Num rows: 6 Data size: 2208 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out index daaf17f905..195ea0cf27 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets2.q.out @@ -88,10 +88,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumns: 2:bigint Statistics: Num rows: 6 Data size: 2208 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: bigint) Execution mode: vectorized, llap @@ -147,10 +147,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: 
hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 24 Data size: 8832 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Reducer 3 @@ -269,10 +269,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumns: 2:bigint Statistics: Num rows: 6 Data size: 2208 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: bigint) Execution mode: vectorized, llap @@ -328,10 +328,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 24 Data size: 8832 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Reducer 3 @@ -503,10 +503,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:double Statistics: Num rows: 24 Data size: 13248 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: double) Reducer 3 @@ -673,10 +673,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumns: 2:bigint Statistics: Num rows: 3 Data size: 534 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: bigint) Execution mode: vectorized, llap @@ -732,10 +732,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true 
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 12 Data size: 2232 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col3 (type: bigint) Reducer 3 diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3.q.out index dce648adea..b15a993a08 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3.q.out @@ -368,10 +368,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3, 4, 5] + valueColumns: 3:double, 4:bigint, 5:bigint Statistics: Num rows: 48 Data size: 26496 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: double), _col4 (type: bigint), _col5 (type: bigint) Reducer 3 diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3_dec.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3_dec.q.out index d00306b639..2350830bd8 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3_dec.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets3_dec.q.out @@ -95,10 +95,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3, 4, 5] + valueColumns: 3:decimal(20,2), 4:bigint, 5:bigint Statistics: Num rows: 48 Data size: 23040 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: decimal(20,2)), _col4 (type: bigint), _col5 (type: bigint) Execution mode: vectorized, llap @@ -234,10 +234,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3, 4, 5] + valueColumns: 3:decimal(20,2), 4:bigint, 5:bigint Statistics: Num rows: 48 Data size: 23040 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: decimal(20,2)), _col4 (type: bigint), 
_col5 (type: bigint) Execution mode: vectorized, llap @@ -399,10 +399,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2, 3, 4] + valueColumns: 2:decimal(20,2), 3:bigint, 4:bigint Statistics: Num rows: 12 Data size: 5760 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: decimal(20,2)), _col3 (type: bigint), _col4 (type: bigint) Execution mode: vectorized, llap @@ -458,10 +458,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3, 4, 5] + valueColumns: 3:decimal(20,2), 4:bigint, 5:bigint Statistics: Num rows: 48 Data size: 23040 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: decimal(20,2)), _col4 (type: bigint), _col5 (type: bigint) Reducer 3 diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out index 3acc4ec933..31ccb5e710 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets4.q.out @@ -97,10 +97,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 8 Data size: 2944 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Reduce Output Operator @@ -109,10 +109,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 8 Data size: 2944 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap @@ -184,10 +184,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: 
string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2] + valueColumns: 1:string, 2:bigint Statistics: Num rows: 4 Data size: 1472 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string), _col2 (type: bigint) Reducer 3 @@ -208,6 +208,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 4 Execution mode: vectorized, llap Reduce Vectorization: @@ -260,10 +263,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2] + valueColumns: 1:string, 2:bigint Statistics: Num rows: 4 Data size: 1472 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string), _col2 (type: bigint) @@ -342,10 +345,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 8 Data size: 2944 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Reduce Output Operator @@ -354,10 +357,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 8 Data size: 2944 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Execution mode: vectorized, llap @@ -429,10 +432,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe 
for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2] + valueColumns: 1:string, 2:bigint Statistics: Num rows: 4 Data size: 1472 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string), _col2 (type: bigint) Reducer 3 @@ -453,6 +456,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 4 Execution mode: vectorized, llap Reduce Vectorization: @@ -505,10 +511,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2] + valueColumns: 1:string, 2:bigint Statistics: Num rows: 4 Data size: 1472 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string), _col2 (type: bigint) @@ -618,10 +624,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumns: 2:bigint Statistics: Num rows: 2 Data size: 736 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: bigint) Execution mode: vectorized, llap @@ -677,10 +683,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 8 Data size: 2944 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Reduce Output Operator @@ -689,10 +695,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 8 Data size: 2944 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Reducer 3 @@ -747,10 +753,10 @@ STAGE PLANS: Map-reduce partition columns: 
_col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2] + valueColumns: 1:string, 2:bigint Statistics: Num rows: 4 Data size: 1472 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string), _col2 (type: bigint) Reducer 4 @@ -771,6 +777,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 5 Execution mode: vectorized, llap Reduce Vectorization: @@ -823,10 +832,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2] + valueColumns: 1:string, 2:bigint Statistics: Num rows: 4 Data size: 1472 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string), _col2 (type: bigint) diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out index bbfba28e31..07c4eed6f0 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets5.q.out @@ -88,10 +88,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6 Data size: 2208 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -158,10 +157,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 12 Data size: 4416 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Reducer 3 @@ -280,10 +279,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string) Reduce Sink Vectorization: 
className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6 Data size: 2208 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -350,10 +348,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 12 Data size: 4416 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Reducer 3 @@ -499,10 +497,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6 Data size: 2208 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -569,10 +566,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumns: 2:bigint Statistics: Num rows: 3 Data size: 1104 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: bigint) Reducer 3 @@ -611,10 +608,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 12 Data size: 4416 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) Reducer 4 diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out index e26b6c5db4..4563bd6c3e 100644 --- 
ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets6.q.out @@ -88,10 +88,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6 Data size: 2208 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -231,10 +230,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6 Data size: 2208 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out index d75f2d8517..a5a3758211 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_grouping.q.out @@ -88,10 +88,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -249,10 +248,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 24 Data size: 192 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -417,10 +415,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: 
true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 24 Data size: 192 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -586,10 +583,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 24 Data size: 192 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -657,10 +653,10 @@ STAGE PLANS: sort order: -+ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [5, 4] + keyColumns: 5:bigint, 4:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1] + valueColumns: 0:int, 1:int Statistics: Num rows: 12 Data size: 96 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: int) Reducer 3 @@ -668,7 +664,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: za + reduceColumnNullOrder: zz reduceColumnSortOrder: -+ allNative: false usesVectorUDFAdaptor: false @@ -792,10 +788,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -953,10 +948,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 24 Data size: 192 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -1128,10 +1122,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: bigint) Reduce Sink 
Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 96 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -1290,10 +1283,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 24 Data size: 192 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -1354,11 +1346,11 @@ STAGE PLANS: sort order: -+ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [5, 4] + keyColumns: 5:bigint, 4:int keyExpressions: IfExprColumnNull(col 3:boolean, col 0:int, null)(children: LongColEqualLongScalar(col 5:bigint, val 1) -> 3:boolean, col 0:int) -> 4:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1] + valueColumns: 0:int, 1:int Statistics: Num rows: 12 Data size: 96 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: int) Reducer 3 @@ -1366,7 +1358,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: za + reduceColumnNullOrder: zz reduceColumnSortOrder: -+ allNative: false usesVectorUDFAdaptor: false @@ -1490,10 +1482,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1] + keyColumns: 0:int, 1:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6 Data size: 48 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -1646,10 +1637,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1] + keyColumns: 0:int, 1:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6 Data size: 48 Basic stats: 
COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -1804,10 +1794,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1] + keyColumns: 0:int, 1:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6 Data size: 48 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -1953,10 +1942,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 24 Data size: 192 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -2119,10 +2107,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 24 Data size: 192 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -2285,10 +2272,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -2446,10 +2432,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:int, 1:int, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 18 Data size: 144 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out 
ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out index 1f49804ca6..e7c235aa78 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_sets_limit.q.out @@ -97,10 +97,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 24 Data size: 8832 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col3 (type: bigint) @@ -126,7 +126,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aaa + reduceColumnNullOrder: zza reduceColumnSortOrder: +++ allNative: false usesVectorUDFAdaptor: false @@ -165,10 +165,10 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumns: 2:bigint Statistics: Num rows: 12 Data size: 4416 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col2 (type: bigint) @@ -177,7 +177,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: zz reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -233,11 +233,11 @@ a b _c2 2 2 1 2 3 1 2 NULL 2 +3 2 1 3 NULL 1 -NULL 1 2 -NULL 2 3 -NULL 3 1 -NULL NULL 6 +5 2 1 +5 NULL 1 +8 1 1 PREHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT a, b, count(*) FROM T1_n110 GROUP BY a, b GROUPING SETS (a, (a, b), b, ()) order by a, b LIMIT 10 PREHOOK: type: QUERY @@ -307,10 +307,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 24 Data size: 8832 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col3 (type: bigint) @@ -336,7 +336,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - 
reduceColumnNullOrder: aaa + reduceColumnNullOrder: zza reduceColumnSortOrder: +++ allNative: false usesVectorUDFAdaptor: false @@ -375,10 +375,10 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumns: 2:bigint Statistics: Num rows: 12 Data size: 4416 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col2 (type: bigint) @@ -387,7 +387,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: zz reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -443,11 +443,11 @@ a b _c2 2 2 1 2 3 1 2 NULL 2 +3 2 1 3 NULL 1 -NULL 1 2 -NULL 2 3 -NULL 3 1 -NULL NULL 6 +5 2 1 +5 NULL 1 +8 1 1 PREHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT a, b, count(*) FROM T1_n110 GROUP BY a, b GROUPING SETS (a, (a, b)) order by a, b LIMIT 10 PREHOOK: type: QUERY @@ -517,10 +517,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 12 Data size: 4416 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col3 (type: bigint) @@ -546,7 +546,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aaa + reduceColumnNullOrder: zza reduceColumnSortOrder: +++ allNative: false usesVectorUDFAdaptor: false @@ -585,10 +585,10 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumns: 2:bigint Statistics: Num rows: 6 Data size: 2208 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col2 (type: bigint) @@ -597,7 +597,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: zz reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -657,7 +657,7 @@ a b _c2 3 NULL 1 5 2 1 5 NULL 1 -8 NULL 1 +8 1 1 PREHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT a FROM T1_n110 GROUP BY a, b, c 
GROUPING SETS (a, b, c) order by a LIMIT 10 PREHOOK: type: QUERY @@ -725,10 +725,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2, 3] + keyColumns: 0:string, 1:string, 2:string, 3:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 18 Data size: 9936 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap @@ -753,7 +752,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aaaa + reduceColumnNullOrder: zaaa reduceColumnSortOrder: ++++ allNative: false usesVectorUDFAdaptor: false @@ -790,10 +789,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 9 Data size: 4968 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 Reducer 3 @@ -801,7 +799,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -853,10 +851,10 @@ POSTHOOK: Input: default@t1_n110 #### A masked pattern was here #### a 1 -NULL -NULL -NULL -NULL +2 +3 +5 +8 NULL NULL NULL @@ -929,10 +927,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6 Data size: 1104 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap @@ -957,7 +954,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -985,10 +982,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] 
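[Editor's note on the plan diffs above: two renames run through them. EXPLAIN VECTORIZATION DETAIL now prints Reduce Sink columns as index:type pairs (keyColumns: 0:string, 1:string, 2:bigint) rather than bare indices (keyColumnNums: [0, 1, 2]), and empty valueColumnNums: [] entries are dropped rather than printed. The reduceColumnNullOrder flips (aaa to zza, aa to zz) and the reshuffled LIMIT 10 results both come from the new hive.default.nulls.last=true default: 'a' encodes NULLS FIRST and 'z' encodes NULLS LAST, so NULL grouping keys now sort after non-NULL keys and fall out of the first ten rows. A minimal sketch of how to observe the new annotations, where t1 is an illustrative stand-in for T1_n110:

    SET hive.default.nulls.last=true;   -- new default introduced by this patch
    EXPLAIN VECTORIZATION DETAIL
    SELECT a, b, count(*)
    FROM t1
    GROUP BY a, b GROUPING SETS (a, (a, b), b, ())
    ORDER BY a, b
    LIMIT 10;
    -- The final order-by reducer now reports reduceColumnNullOrder: zz
    -- (two ascending keys, NULLS LAST); before this patch it reported aa.
]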
Statistics: Num rows: 3 Data size: 552 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 Reducer 3 @@ -996,7 +992,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -1122,10 +1118,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: double) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:bigint Statistics: Num rows: 6 Data size: 2208 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col1 (type: bigint) @@ -1151,7 +1147,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -1181,10 +1177,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:bigint Statistics: Num rows: 3 Data size: 1104 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col1 (type: bigint) @@ -1193,7 +1189,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false diff --git ql/src/test/results/clientpositive/llap/vector_groupby_grouping_window.q.out ql/src/test/results/clientpositive/llap/vector_groupby_grouping_window.q.out index 5d81631091..abf1c89c68 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_grouping_window.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_grouping_window.q.out @@ -86,10 +86,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1] + keyColumns: 0:int, 1:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2, 3] + valueColumns: 2:int, 3:int Statistics: Num rows: 3 Data size: 60 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: int), _col3 (type: int) Execution mode: vectorized, llap @@ -153,11 +153,11 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: 
className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 2] + keyColumns: 0:int, 2:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [1] + partitionColumns: 0:int + valueColumns: 1:int Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: int) Reducer 3 @@ -165,7 +165,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -193,7 +193,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col3 ASC NULLS FIRST + order by: _col3 ASC NULLS LAST partition by: _col0 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out index c7f38bdbf7..26795d2ad5 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_mapjoin.q.out @@ -57,6 +57,7 @@ STAGE PLANS: className: VectorMapJoinInnerMultiKeyOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col2, _col3 input vertices: 1 Reducer 4 @@ -71,6 +72,7 @@ STAGE PLANS: className: VectorMapJoinOuterStringOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col2, _col3, _col5 input vertices: 1 Reducer 5 diff --git ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out index d1f8ac5505..abf352dc20 100644 --- ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out +++ ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out @@ -84,10 +84,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:bigint Statistics: Num rows: 18 Data size: 6624 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint) 
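[Editor's note: the windowing change just above, order by: _col3 ASC NULLS FIRST becoming ASC NULLS LAST, is the same nulls-last default at work inside OVER clauses, and the new hashTableImplementationType: OPTIMIZED line surfaces the hive.mapjoin.optimized.hashtable choice in the MapJoin vectorization summary. A hedged sketch of the windowing effect, with t, k, and v as illustrative names that are not part of the patch:

    SELECT k,
           rank() OVER (PARTITION BY k ORDER BY v) AS r
    FROM t;
    -- With hive.default.nulls.last=true the unannotated ORDER BY v is
    -- planned as ASC NULLS LAST, so rows with v = NULL now rank after
    -- the non-NULL rows. Spelling out ORDER BY v NULLS FIRST in the
    -- window spec should restore the pre-patch placement.
]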
diff --git ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out
index d1f8ac5505..abf352dc20 100644
--- ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out
+++ ql/src/test/results/clientpositive/llap/vector_groupby_rollup1.q.out
@@ -84,10 +84,10 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: bigint)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkMultiKeyOperator
-                        keyColumnNums: [0, 1, 2]
+                        keyColumns: 0:string, 1:string, 2:bigint
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        valueColumnNums: [3]
+                        valueColumns: 3:bigint
                     Statistics: Num rows: 18 Data size: 6624 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col3 (type: bigint)
             Execution mode: vectorized, llap
@@ -339,11 +339,11 @@ STAGE PLANS:
                     Map-reduce partition columns: rand() (type: double)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkObjectHashOperator
-                        keyColumnNums: [0, 1, 2]
+                        keyColumns: 0:string, 1:string, 2:bigint
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        partitionColumnNums: [4]
-                        valueColumnNums: [3]
+                        partitionColumns: 4:double
+                        valueColumns: 3:bigint
                     Statistics: Num rows: 18 Data size: 6624 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col3 (type: bigint)
             Execution mode: vectorized, llap
@@ -399,11 +399,11 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkObjectHashOperator
-                        keyColumnNums: [0, 1, 2]
+                        keyColumns: 0:string, 1:string, 2:bigint
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        partitionColumnNums: [0, 1]
-                        valueColumnNums: [3]
+                        partitionColumns: 0:string, 1:string
+                        valueColumns: 3:bigint
                     Statistics: Num rows: 18 Data size: 6624 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col3 (type: bigint)
         Reducer 3 
@@ -685,11 +685,11 @@ STAGE PLANS:
                     Map-reduce partition columns: rand() (type: double)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkObjectHashOperator
-                        keyColumnNums: [0, 1, 2]
+                        keyColumns: 0:string, 1:string, 2:bigint
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        partitionColumnNums: [4]
-                        valueColumnNums: [3]
+                        partitionColumns: 4:double
+                        valueColumns: 3:bigint
                     Statistics: Num rows: 18 Data size: 6624 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col3 (type: bigint)
                 Select Operator
@@ -720,11 +720,11 @@ STAGE PLANS:
                     Map-reduce partition columns: rand() (type: double)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkObjectHashOperator
-                        keyColumnNums: [0, 1, 2]
+                        keyColumns: 0:string, 1:string, 2:bigint
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        partitionColumnNums: [4]
-                        valueColumnNums: [3]
+                        partitionColumns: 4:double
+                        valueColumns: 3:bigint
                     Statistics: Num rows: 18 Data size: 6624 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col3 (type: bigint)
             Execution mode: vectorized, llap
@@ -780,11 +780,11 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkObjectHashOperator
-                        keyColumnNums: [0, 1, 2]
+                        keyColumns: 0:string, 1:string, 2:bigint
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        partitionColumnNums: [0, 1]
-                        valueColumnNums: [3]
+                        partitionColumns: 0:string, 1:string
+                        valueColumns: 3:bigint
                     Statistics: Num rows: 18 Data size: 6624 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col3 (type: bigint)
         Reducer 3 
@@ -873,11 +873,11 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkObjectHashOperator
-                        keyColumnNums: [0, 1, 2]
+                        keyColumns: 0:string, 1:string, 2:bigint
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        partitionColumnNums: [0, 1]
-                        valueColumnNums: [3]
+                        partitionColumns: 0:string, 1:string
+                        valueColumns: 3:bigint
                     Statistics: Num rows: 18 Data size: 6624 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col3 (type: bigint)
         Reducer 5 
diff --git ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out
index 6182c90fc1..edc1faeb6b 100644
--- ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out
+++ ql/src/test/results/clientpositive/llap/vector_groupby_sort_11.q.out
@@ -86,10 +86,9 @@ STAGE PLANS:
                     sort order: 
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkEmptyKeyOperator
-                        keyColumnNums: []
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        valueColumnNums: [0]
+                        valueColumns: 0:bigint
                     Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col0 (type: bigint)
             Execution mode: vectorized, llap
@@ -509,10 +508,9 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: double)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkMultiKeyOperator
-                        keyColumnNums: [0]
+                        keyColumns: 0:double
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        valueColumnNums: []
                     Statistics: Num rows: 5 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: vectorized, llap
             LLAP IO: no inputs
@@ -576,10 +574,9 @@ STAGE PLANS:
                     sort order: 
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkEmptyKeyOperator
-                        keyColumnNums: []
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        valueColumnNums: [0]
+                        valueColumns: 0:bigint
                     Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col0 (type: bigint)
         Reducer 3 
@@ -693,10 +690,9 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: boolean)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkLongOperator
-                        keyColumnNums: [0]
+                        keyColumns: 0:boolean
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        valueColumnNums: []
                     Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: vectorized, llap
             LLAP IO: no inputs
@@ -766,10 +762,9 @@ STAGE PLANS:
                     sort order: 
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkEmptyKeyOperator
-                        keyColumnNums: []
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        valueColumnNums: [0]
+                        valueColumns: 0:bigint
                     Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col0 (type: bigint)
         Reducer 3 
@@ -873,10 +868,9 @@ STAGE PLANS:
                     Map-reduce partition columns: key (type: string)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkStringOperator
-                        keyColumnNums: [0]
+                        keyColumns: 0:string
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        valueColumnNums: []
                     Statistics: Num rows: 10 Data size: 850 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: vectorized, llap
             LLAP IO: no inputs
@@ -928,10 +922,9 @@ STAGE PLANS:
                     sort order: 
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkEmptyKeyOperator
-                        keyColumnNums: []
                        native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        valueColumnNums: [0]
+                        valueColumns: 0:string
                     Statistics: Num rows: 6 Data size: 510 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col0 (type: string)
         Reducer 3 
diff --git ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out
index 5e946c4e9c..d3ba68868d 100644
--- ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out
+++ ql/src/test/results/clientpositive/llap/vector_groupby_sort_8.q.out
@@ -95,10 +95,9 @@ STAGE PLANS:
                     sort order: 
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkEmptyKeyOperator
-                        keyColumnNums: []
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        valueColumnNums: [0]
+                        valueColumns: 0:bigint
                     Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col0 (type: bigint)
             Execution mode: vectorized, llap
diff --git ql/src/test/results/clientpositive/llap/vector_include_no_sel.q.out ql/src/test/results/clientpositive/llap/vector_include_no_sel.q.out
index 041990a12f..c5e1daea33 100644
--- ql/src/test/results/clientpositive/llap/vector_include_no_sel.q.out
+++ ql/src/test/results/clientpositive/llap/vector_include_no_sel.q.out
@@ -269,6 +269,9 @@ STAGE PLANS:
                     sort order: 
                     Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col0 (type: bigint)
+            MergeJoin Vectorization:
+                enabled: false
+                enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3 
             Execution mode: vectorized, llap
             Reduce Vectorization:
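The MergeJoin Vectorization block added to vector_include_no_sel.q.out above records why a reduce-side Merge Join vertex stays in row mode even when the surrounding operators vectorize: there is no vectorized merge-join implementation, so the reducer reports enabled: false. A sketch of how to surface that diagnostic; the table names and session settings are illustrative assumptions, not from this patch:

-- Disable map-join conversion so the planner emits a shuffle (merge) join,
-- then request the detailed vectorization report.
SET hive.auto.convert.join=false;
EXPLAIN VECTORIZATION DETAIL
SELECT COUNT(*)
FROM t1 JOIN t2 ON (t1.key = t2.key);
-- Expected in the merge-join reducer:
--   MergeJoin Vectorization:
--       enabled: false
--       enableConditionsNotMet: Vectorizing MergeJoin Supported IS false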
diff --git ql/src/test/results/clientpositive/llap/vector_inner_join.q.out ql/src/test/results/clientpositive/llap/vector_inner_join.q.out
index 82a29809a8..3537c4061b 100644
--- ql/src/test/results/clientpositive/llap/vector_inner_join.q.out
+++ ql/src/test/results/clientpositive/llap/vector_inner_join.q.out
@@ -85,12 +85,14 @@ STAGE PLANS:
                         0 _col0 (type: int)
                         1 _col0 (type: int)
                       Map Join Vectorization:
-                          bigTableKeyColumnNums: [0]
-                          bigTableRetainedColumnNums: [0]
+                          bigTableKeyColumns: 0:int
+                          bigTableRetainColumnNums: []
                           className: VectorMapJoinInnerBigOnlyLongOperator
                           native: true
                           nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
-                          projectedOutputColumnNums: [0]
+                          nonOuterSmallTableKeyMapping: [0]
+                          projectedOutput: 0:int
+                          hashTableImplementationType: OPTIMIZED
                       outputColumnNames: _col1
                       input vertices:
                         1 Map 2
@@ -160,10 +162,9 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: int)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkLongOperator
-                        keyColumnNums: [0]
+                        keyColumns: 0:int
                        native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        valueColumnNums: []
                     Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: vectorized, llap
             LLAP IO: all inputs
@@ -253,13 +254,15 @@ STAGE PLANS:
                         0 _col0 (type: int)
                         1 _col0 (type: int)
                       Map Join Vectorization:
-                          bigTableKeyColumnNums: [0]
-                          bigTableRetainedColumnNums: [0]
-                          bigTableValueColumnNums: [0]
+                          bigTableKeyColumns: 0:int
+                          bigTableRetainColumnNums: [0]
+                          bigTableValueColumns: 0:int
                           className: VectorMapJoinLeftSemiLongOperator
                           native: true
                           nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
-                          projectedOutputColumnNums: [0]
+                          nonOuterSmallTableKeyMapping: []
+                          projectedOutput: 0:int
+                          hashTableImplementationType: OPTIMIZED
                       outputColumnNames: _col0
                       input vertices:
                         1 Map 2
@@ -333,10 +336,9 @@ STAGE PLANS:
                     Map-reduce partition columns: _col0 (type: int)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkLongOperator
-                        keyColumnNums: [0]
+                        keyColumns: 0:int
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        valueColumnNums: []
                     Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: vectorized, llap
             LLAP IO: all inputs
@@ -462,13 +464,15 @@ STAGE PLANS:
                         0 _col0 (type: int)
                         1 _col1 (type: int)
                       Map Join Vectorization:
-                          bigTableKeyColumnNums: [0]
-                          bigTableRetainedColumnNums: [0]
+                          bigTableKeyColumns: 0:int
+                          bigTableRetainColumnNums: []
                           className: VectorMapJoinInnerLongOperator
                           native: true
                           nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
-                          projectedOutputColumnNums: [3, 0]
-                          smallTableMapping: [3]
+                          nonOuterSmallTableKeyMapping: [0]
+                          projectedOutput: 3:string, 0:int
+                          smallTableValueMapping: 3:string
+                          hashTableImplementationType: OPTIMIZED
                       outputColumnNames: _col1, _col2
                       input vertices:
                         1 Map 2
@@ -538,10 +542,10 @@ STAGE PLANS:
                     Map-reduce partition columns: _col1 (type: int)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkLongOperator
-                        keyColumnNums: [1]
+                        keyColumns: 1:int
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        valueColumnNums: [0]
+                        valueColumns: 0:string
                     Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col0 (type: string)
             Execution mode: vectorized, llap
@@ -631,10 +635,10 @@ STAGE PLANS:
                     Map-reduce partition columns: _col1 (type: int)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkLongOperator
-                        keyColumnNums: [1]
+                        keyColumns: 1:int
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        valueColumnNums: [0]
+                        valueColumns: 0:string
                     Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col0 (type: string)
             Execution mode: vectorized, llap
@@ -685,14 +689,16 @@ STAGE PLANS:
                         0 _col1 (type: int)
                         1 _col0 (type: int)
                       Map Join Vectorization:
-                          bigTableKeyColumnNums: [0]
-                          bigTableRetainedColumnNums: [0, 1]
-                          bigTableValueColumnNums: [0, 1]
+                          bigTableKeyColumns: 0:int
+                          bigTableRetainColumnNums: [0, 1]
+                          bigTableValueColumns: 0:int, 1:string
                           className: VectorMapJoinInnerLongOperator
                           native: true
                           nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
-                          projectedOutputColumnNums: [3, 0, 0, 1]
-                          smallTableMapping: [3]
+                          nonOuterSmallTableKeyMapping: []
+                          projectedOutput: 3:string, 0:int, 0:int, 1:string
+                          smallTableValueMapping: 3:string
+                          hashTableImplementationType: OPTIMIZED
                       outputColumnNames: _col0, _col1, _col2, _col3
                       input vertices:
                         0 Map 1
@@ -795,14 +801,16 @@ STAGE PLANS:
                         0 _col0 (type: int)
                         1 _col1 (type: int)
                       Map Join Vectorization:
-                          bigTableKeyColumnNums: [0]
-                          bigTableRetainedColumnNums: [0, 1]
-                          bigTableValueColumnNums: [0, 1]
+                          bigTableKeyColumns: 0:int
+                          bigTableRetainColumnNums: [0, 1]
+                          bigTableValueColumns: 0:int, 1:string
                           className: VectorMapJoinInnerLongOperator
                           native: true
                           nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
-                          projectedOutputColumnNums: [0, 1, 3, 0]
-                          smallTableMapping: [3]
+                          nonOuterSmallTableKeyMapping: []
+                          projectedOutput: 0:int, 1:string, 3:string, 0:int
+                          smallTableValueMapping: 3:string
+                          hashTableImplementationType: OPTIMIZED
                       outputColumnNames: _col0, _col1, _col2, _col3
                       input vertices:
                         1 Map 2
@@ -873,10 +881,10 @@ STAGE PLANS:
                     Map-reduce partition columns: _col1 (type: int)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkLongOperator
-                        keyColumnNums: [1]
+                        keyColumns: 1:int
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        valueColumnNums: [0]
+                        valueColumns: 0:string
                     Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col0 (type: string)
             Execution mode: vectorized, llap
@@ -967,14 +975,16 @@ STAGE PLANS:
                         0 _col0 (type: int)
                         1 _col1 (type: int)
                       Map Join Vectorization:
-                          bigTableKeyColumnNums: [0]
-                          bigTableRetainedColumnNums: [0, 1]
-                          bigTableValueColumnNums: [0, 1]
+                          bigTableKeyColumns: 0:int
+                          bigTableRetainColumnNums: [0, 1]
+                          bigTableValueColumns: 0:int, 1:string
                           className: VectorMapJoinInnerLongOperator
                           native: true
                           nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
-                          projectedOutputColumnNums: [0, 1, 3]
-                          smallTableMapping: [3]
+                          nonOuterSmallTableKeyMapping: []
+                          projectedOutput: 0:int, 1:string, 3:string
+                          smallTableValueMapping: 3:string
+                          hashTableImplementationType: OPTIMIZED
                       outputColumnNames: _col0, _col1, _col2
                       input vertices:
                         1 Map 2
@@ -1044,10 +1054,10 @@ STAGE PLANS:
                     Map-reduce partition columns: _col1 (type: int)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkLongOperator
-                        keyColumnNums: [1]
+                        keyColumns: 1:int
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        valueColumnNums: [0]
+                        valueColumns: 0:string
                     Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col0 (type: string)
             Execution mode: vectorized, llap
@@ -1138,14 +1148,16 @@ STAGE PLANS:
                         0 _col0 (type: int)
                         1 _col1 (type: int)
                       Map Join Vectorization:
-                          bigTableKeyColumnNums: [0]
-                          bigTableRetainedColumnNums: [0, 1]
-                          bigTableValueColumnNums: [1]
+                          bigTableKeyColumns: 0:int
+                          bigTableRetainColumnNums: [1]
+                          bigTableValueColumns: 1:string
                           className: VectorMapJoinInnerLongOperator
                           native: true
                           nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
-                          projectedOutputColumnNums: [1, 3, 0]
-                          smallTableMapping: [3]
+                          nonOuterSmallTableKeyMapping: [0]
+                          projectedOutput: 1:string, 3:string, 0:int
+                          smallTableValueMapping: 3:string
+                          hashTableImplementationType: OPTIMIZED
                       outputColumnNames: _col1, _col2, _col3
                       input vertices:
                         1 Map 2
@@ -1215,10 +1227,10 @@ STAGE PLANS:
                     Map-reduce partition columns: _col1 (type: int)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkLongOperator
-                        keyColumnNums: [1]
+                        keyColumns: 1:int
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        valueColumnNums: [0]
+                        valueColumns: 0:string
                     Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col0 (type: string)
             Execution mode: vectorized, llap
@@ -1308,10 +1320,10 @@ STAGE PLANS:
                     Map-reduce partition columns: _col1 (type: int)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkLongOperator
-                        keyColumnNums: [1]
+                        keyColumns: 1:int
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        valueColumnNums: [0]
+                        valueColumns: 0:string
                     Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col0 (type: string)
             Execution mode: vectorized, llap
@@ -1362,14 +1374,16 @@ STAGE PLANS:
                         0 _col1 (type: int)
                         1 _col0 (type: int)
                       Map Join Vectorization:
-                          bigTableKeyColumnNums: [0]
-                          bigTableRetainedColumnNums: [0, 1]
-                          bigTableValueColumnNums: [0, 1]
+                          bigTableKeyColumns: 0:int
+                          bigTableRetainColumnNums: [0, 1]
+                          bigTableValueColumns: 0:int, 1:string
                           className: VectorMapJoinInnerLongOperator
                           native: true
                           nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
-                          projectedOutputColumnNums: [3, 0, 1]
-                          smallTableMapping: [3]
+                          nonOuterSmallTableKeyMapping: []
+                          projectedOutput: 3:string, 0:int, 1:string
+                          smallTableValueMapping: 3:string
+                          hashTableImplementationType: OPTIMIZED
                       outputColumnNames: _col0, _col2, _col3
                       input vertices:
                         0 Map 1
@@ -1479,10 +1493,10 @@ STAGE PLANS:
                     Map-reduce partition columns: _col1 (type: int)
                     Reduce Sink Vectorization:
                         className: VectorReduceSinkLongOperator
-                        keyColumnNums: [1]
+                        keyColumns: 1:int
                         native: true
                         nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                        valueColumnNums: [0]
+                        valueColumns: 0:string
                     Statistics: Num rows: 1 Data size: 92 Basic stats: COMPLETE Column stats: COMPLETE
                     value expressions: _col0 (type: string)
             Execution mode: vectorized, llap
@@ -1533,14 +1547,16 @@ STAGE PLANS:
                         0 _col1 (type: int)
                         1 _col0 (type: int)
                       Map Join Vectorization:
-                          bigTableKeyColumnNums: [0]
-                          bigTableRetainedColumnNums: [0, 1]
-                          bigTableValueColumnNums: [1]
+                          bigTableKeyColumns: 0:int
+                          bigTableRetainColumnNums: [1]
+                          bigTableValueColumns: 1:string
                           className: VectorMapJoinInnerLongOperator
                           native: true
                           nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
-                          projectedOutputColumnNums: [3, 0, 1]
-                          smallTableMapping: [3]
+                          nonOuterSmallTableKeyMapping: [0]
+                          projectedOutput: 3:string, 0:int, 1:string
+                          smallTableValueMapping: 3:string
+                          hashTableImplementationType: OPTIMIZED
                       outputColumnNames: _col0, _col1, _col3
                       input vertices:
                         0 Map 1
diff --git ql/src/test/results/clientpositive/llap/vector_interval_2.q.out ql/src/test/results/clientpositive/llap/vector_interval_2.q.out
index b95fc593f4..a74023fc42 100644
--- ql/src/test/results/clientpositive/llap/vector_interval_2.q.out
+++ ql/src/test/results/clientpositive/llap/vector_interval_2.q.out
@@ -258,8 +258,8 @@ from vector_interval_2 order by str1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@vector_interval_2
 #### A masked pattern was here ####
-NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL
 1-2 true true true true true true true true true true true true true true true true true true true true true true true true
+NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL
 PREHOOK: query: explain vectorization expression
 select
    str1,
@@ -452,8 +452,8 @@ from vector_interval_2 order by str1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@vector_interval_2
 #### A masked pattern was here ####
-NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL
 1-2 false false false false false false false false false false false false false false false false false false
+NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL
 PREHOOK: query: explain vectorization expression
 select
    str3,
@@ -670,8 +670,8 @@ from vector_interval_2 order by str3
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@vector_interval_2
 #### A masked pattern was here ####
-NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL
 1 2:3:4 true true true true true true true true true true true true true true true true true true true true true true true true
+NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL
 PREHOOK: query: explain vectorization expression
 select
    str3,
@@ -864,8 +864,8 @@ from vector_interval_2 order by str3
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@vector_interval_2
 #### A masked pattern was here ####
-NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL
 1 2:3:4 false false false false false false false false false false false false false false false false false false
+NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL
 PREHOOK: query: explain vectorization expression
 select
    ts from vector_interval_2 where
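The vector_inner_join.q.out hunks above rename the Map Join and Reduce Sink vectorization fields from bare batch indices (keyColumnNums: [1]) to index:type pairs (keyColumns: 1:int), and surface the hash table implementation and small-table key handling directly in the plan. A sketch of the kind of query that prints these blocks; the tables and session settings are illustrative assumptions, not from this patch:

SET hive.vectorized.execution.enabled=true;
SET hive.auto.convert.join=true;
EXPLAIN VECTORIZATION DETAIL
SELECT t2.v, t1.a
FROM t1 JOIN t2 ON (t1.a = t2.a);
-- The Map Join Vectorization block is expected to read along the lines of:
--   bigTableKeyColumns: 0:int
--   smallTableValueMapping: 3:string
--   hashTableImplementationType: OPTIMIZED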
diff --git ql/src/test/results/clientpositive/llap/vector_interval_mapjoin.q.out ql/src/test/results/clientpositive/llap/vector_interval_mapjoin.q.out
index d4151e87a4..da4c84b61e 100644
--- ql/src/test/results/clientpositive/llap/vector_interval_mapjoin.q.out
+++ ql/src/test/results/clientpositive/llap/vector_interval_mapjoin.q.out
@@ -230,6 +230,7 @@ STAGE PLANS:
                         className: VectorMapJoinInnerBigOnlyMultiKeyOperator
                         native: true
                         nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
+                        hashTableImplementationType: OPTIMIZED
                     outputColumnNames: _col0, _col1, _col2
                     input vertices:
                       1 Map 2
diff --git ql/src/test/results/clientpositive/llap/vector_join30.q.out ql/src/test/results/clientpositive/llap/vector_join30.q.out
index 4b2f06ff85..9238bc7869 100644
--- ql/src/test/results/clientpositive/llap/vector_join30.q.out
+++ ql/src/test/results/clientpositive/llap/vector_join30.q.out
@@ -10,7 +10,7 @@ POSTHOOK: Output: database:default
 POSTHOOK: Output: default@orcsrc_n0
 POSTHOOK: Lineage: orcsrc_n0.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: orcsrc_n0.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: explain vectorization expression
+PREHOOK: query: explain vectorization detail
 FROM
 (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
 JOIN
@@ -18,7 +18,7 @@ JOIN
 ON (x.key = Y.key)
 select sum(hash(Y.key,Y.value))
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization expression
+POSTHOOK: query: explain vectorization detail
 FROM
 (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
 JOIN
@@ -51,6 +51,7 @@ STAGE PLANS:
                   Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct]
                   Filter Operator
                     Filter Vectorization:
                         className: VectorFilterOperator
@@ -72,6 +73,7 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
+                          keyColumns: 0:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 475 Data size: 83204 Basic stats: COMPLETE Column stats: NONE
@@ -86,6 +88,12 @@ STAGE PLANS:
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0]
+                    dataColumns: key:string, value:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
         Map 2 
             Map Operator Tree:
                 TableScan
@@ -94,6 +102,7 @@ STAGE PLANS:
                   Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct]
                   Filter Operator
                     Filter Vectorization:
                         className: VectorFilterOperator
@@ -116,9 +125,15 @@ STAGE PLANS:
                         0 _col0 (type: string)
                         1 _col0 (type: string)
                       Map Join Vectorization:
+                          bigTableKeyColumns: 0:string
+                          bigTableRetainColumnNums: [0, 1]
+                          bigTableValueColumns: 0:string, 1:string
                           className: VectorMapJoinInnerBigOnlyStringOperator
                           native: true
                           nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
+                          nonOuterSmallTableKeyMapping: []
+                          projectedOutput: 0:string, 1:string
+                          hashTableImplementationType: OPTIMIZED
                       outputColumnNames: _col2, _col3
                       input vertices:
                         0 Map 1
@@ -141,6 +156,7 @@ STAGE PLANS:
                             className: VectorReduceSinkEmptyKeyOperator
                             native: true
                             nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                            valueColumns: 0:bigint
                         Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: bigint)
             Execution mode: vectorized, llap
@@ -154,14 +170,27 @@ STAGE PLANS:
                 allNative: false
                 usesVectorUDFAdaptor: true
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0, 1]
+                    dataColumns: key:string, value:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: [bigint]
         Reducer 3 
             Execution mode: vectorized, llap
             Reduce Vectorization:
                 enabled: true
                 enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: 
+                reduceColumnSortOrder: 
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 1
+                    dataColumns: VALUE._col0:bigint
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
             Reduce Operator Tree:
               Group By Operator
                 aggregations: sum(VALUE._col0)
@@ -192,26 +221,7 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-PREHOOK: query: FROM
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
-JOIN
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y
-ON (x.key = Y.key)
-select sum(hash(Y.key,Y.value))
-PREHOOK: type: QUERY
-PREHOOK: Input: default@orcsrc_n0
-#### A masked pattern was here ####
-POSTHOOK: query: FROM
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
-JOIN
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y
-ON (x.key = Y.key)
-select sum(hash(Y.key,Y.value))
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@orcsrc_n0
-#### A masked pattern was here ####
-103231310608
-PREHOOK: query: explain vectorization expression
+PREHOOK: query: explain vectorization detail
 FROM
 (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
 LEFT OUTER JOIN
@@ -219,7 +229,7 @@ LEFT OUTER JOIN
 ON (x.key = Y.key)
 select sum(hash(Y.key,Y.value))
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization expression
+POSTHOOK: query: explain vectorization detail
 FROM
 (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
 LEFT OUTER JOIN
@@ -251,6 +261,7 @@ STAGE PLANS:
                   Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct]
                   Select Operator
                     expressions: key (type: string)
                     outputColumnNames: _col0
@@ -266,9 +277,15 @@ STAGE PLANS:
                         0 _col0 (type: string)
                         1 _col0 (type: string)
                       Map Join Vectorization:
+                          bigTableKeyColumns: 0:string
+                          bigTableRetainColumnNums: []
                           className: VectorMapJoinOuterStringOperator
                           native: true
                           nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true
+                          outerSmallTableKeyMapping: 0 -> 3
+                          projectedOutput: 3:string, 4:string
+                          smallTableValueMapping: 4:string
+                          hashTableImplementationType: OPTIMIZED
                       outputColumnNames: _col2, _col3
                       input vertices:
                         1 Map 3
@@ -291,6 +308,7 @@ STAGE PLANS:
                             className: VectorReduceSinkEmptyKeyOperator
                             native: true
                             nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                            valueColumns: 0:bigint
                         Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: bigint)
             Execution mode: vectorized, llap
@@ -304,6 +322,12 @@ STAGE PLANS:
                 allNative: false
                 usesVectorUDFAdaptor: true
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0]
+                    dataColumns: key:string, value:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: [string, string, bigint]
         Map 3 
            Map Operator Tree:
                 TableScan
@@ -311,6 +335,7 @@ STAGE PLANS:
                   Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct]
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -325,8 +350,10 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
+                          keyColumns: 0:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                          valueColumns: 1:string
                       Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col1 (type: string)
             Execution mode: vectorized, llap
@@ -340,14 +367,27 @@ STAGE PLANS:
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0, 1]
+                    dataColumns: key:string, value:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
         Reducer 2 
             Execution mode: vectorized, llap
             Reduce Vectorization:
                 enabled: true
                 enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: 
+                reduceColumnSortOrder: 
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 1
+                    dataColumns: VALUE._col0:bigint
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
             Reduce Operator Tree:
              Group By Operator
                 aggregations: sum(VALUE._col0)
@@ -378,26 +418,7 @@ STAGE PLANS:
       Processor Tree:
        ListSink
 
-PREHOOK: query: FROM
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
-LEFT OUTER JOIN
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y
-ON (x.key = Y.key)
-select sum(hash(Y.key,Y.value))
-PREHOOK: type: QUERY
-PREHOOK: Input: default@orcsrc_n0
-#### A masked pattern was here ####
-POSTHOOK: query: FROM
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
-LEFT OUTER JOIN
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y
-ON (x.key = Y.key)
-select sum(hash(Y.key,Y.value))
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@orcsrc_n0
-#### A masked pattern was here ####
-103231310608
-PREHOOK: query: explain vectorization expression
+PREHOOK: query: explain vectorization detail
 FROM
 (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
 RIGHT OUTER JOIN
@@ -405,7 +426,7 @@ RIGHT OUTER JOIN
 ON (x.key = Y.key)
 select sum(hash(Y.key,Y.value))
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization expression
+POSTHOOK: query: explain vectorization detail
 FROM
 (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
 RIGHT OUTER JOIN
@@ -437,6 +458,7 @@ STAGE PLANS:
                   Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct]
                   Select Operator
                     expressions: key (type: string)
                     outputColumnNames: _col0
@@ -451,6 +473,7 @@ STAGE PLANS:
                      Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
+                          keyColumns: 0:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE
@@ -465,6 +488,12 @@ STAGE PLANS:
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0]
+                    dataColumns: key:string, value:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
         Map 2 
             Map Operator Tree:
                 TableScan
@@ -472,6 +501,7 @@ STAGE PLANS:
                   Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct]
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -487,9 +517,14 @@ STAGE PLANS:
                         0 _col0 (type: string)
                         1 _col0 (type: string)
                       Map Join Vectorization:
+                          bigTableKeyColumns: 0:string
+                          bigTableRetainColumnNums: [0, 1]
+                          bigTableValueColumns: 0:string, 1:string
                           className: VectorMapJoinOuterStringOperator
                           native: true
                           nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true
+                          projectedOutput: 0:string, 1:string
+                          hashTableImplementationType: OPTIMIZED
                       outputColumnNames: _col2, _col3
                       input vertices:
                         0 Map 1
@@ -512,6 +547,7 @@ STAGE PLANS:
                             className: VectorReduceSinkEmptyKeyOperator
                             native: true
                             nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                            valueColumns: 0:bigint
                         Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: bigint)
             Execution mode: vectorized, llap
@@ -525,14 +561,27 @@ STAGE PLANS:
                 allNative: false
                 usesVectorUDFAdaptor: true
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0, 1]
+                    dataColumns: key:string, value:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: [bigint]
         Reducer 3 
             Execution mode: vectorized, llap
             Reduce Vectorization:
                 enabled: true
                 enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: 
+                reduceColumnSortOrder: 
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 1
+                    dataColumns: VALUE._col0:bigint
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
             Reduce Operator Tree:
              Group By Operator
                 aggregations: sum(VALUE._col0)
@@ -563,26 +612,196 @@ STAGE PLANS:
       Processor Tree:
        ListSink
 
-PREHOOK: query: FROM
+PREHOOK: query: explain vectorization detail
+FROM
 (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
-RIGHT OUTER JOIN
+FULL OUTER JOIN
 (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y
 ON (x.key = Y.key)
 select sum(hash(Y.key,Y.value))
 PREHOOK: type: QUERY
-PREHOOK: Input: default@orcsrc_n0
-#### A masked pattern was here ####
-POSTHOOK: query: FROM
+POSTHOOK: query: explain vectorization detail
+FROM
 (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
-RIGHT OUTER JOIN
+FULL OUTER JOIN
 (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y
 ON (x.key = Y.key)
 select sum(hash(Y.key,Y.value))
 POSTHOOK: type: QUERY
-POSTHOOK: Input: default@orcsrc_n0
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
 #### A masked pattern was here ####
-103231310608
-PREHOOK: query: explain vectorization expression
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: orcsrc_n0
+                  Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct]
+                  Select Operator
+                    expressions: key (type: string)
+                    outputColumnNames: _col0
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
+                        projectedOutputColumnNums: [0]
+                    Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string)
+                      sort order: +
+                      Map-reduce partition columns: _col0 (type: string)
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkStringOperator
+                          keyColumns: 0:string
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                      Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0]
+                    dataColumns: key:string, value:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+        Map 4 
+            Map Operator Tree:
+                TableScan
+                  alias: orcsrc_n0
+                  Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct]
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
+                        projectedOutputColumnNums: [0, 1]
+                    Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string)
+                      sort order: +
+                      Map-reduce partition columns: _col0 (type: string)
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkStringOperator
+                          keyColumns: 0:string
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                          valueColumns: 1:string
+                      Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col1 (type: string)
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0, 1]
+                    dataColumns: key:string, value:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Full Outer Join 0 to 1
+                keys:
+                  0 _col0 (type: string)
+                  1 _col0 (type: string)
+                outputColumnNames: _col2, _col3
+                Statistics: Num rows: 550 Data size: 96342 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: sum(hash(_col2,_col3))
+                  mode: hash
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    sort order: 
+                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col0 (type: bigint)
+            MergeJoin Vectorization:
+                enabled: false
+                enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
+        Reducer 3 
+            Execution mode: vectorized, llap
+            Reduce Vectorization:
+                enabled: true
+                enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: 
+                reduceColumnSortOrder: 
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 1
+                    dataColumns: VALUE._col0:bigint
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(VALUE._col0)
+                Group By Vectorization:
+                    aggregators: VectorUDAFSumLong(col 0:bigint) -> bigint
+                    className: VectorGroupByOperator
+                    groupByMode: MERGEPARTIAL
+                    native: false
+                    vectorProcessingMode: GLOBAL
+                    projectedOutputColumnNums: [0]
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  File Sink Vectorization:
+                      className: VectorFileSinkOperator
+                      native: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: explain vectorization detail
 FROM
 (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
 JOIN
@@ -593,7 +812,7 @@ JOIN
 (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y
 ON (x.key = Y.key)
 JOIN
 (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z
 ON (x.key = Z.key)
 select sum(hash(Y.key,Y.value))
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization expression
+POSTHOOK: query: explain vectorization detail
 FROM
 (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
 JOIN
@@ -629,6 +848,7 @@ STAGE PLANS:
                   Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct]
                   Filter Operator
                     Filter Vectorization:
                         className: VectorFilterOperator
@@ -650,6 +870,7 @@ STAGE PLANS:
                      Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
+                          keyColumns: 0:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 475 Data size: 83204 Basic stats: COMPLETE Column stats: NONE
@@ -664,6 +885,12 @@ STAGE PLANS:
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0]
+                    dataColumns: key:string, value:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
         Map 2 
             Map Operator Tree:
                 TableScan
@@ -672,6 +899,7 @@ STAGE PLANS:
                   Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct]
                   Filter Operator
                     Filter Vectorization:
                         className: VectorFilterOperator
@@ -725,6 +953,7 @@ STAGE PLANS:
                             className: VectorReduceSinkEmptyKeyOperator
                             native: true
                             nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                            valueColumns: 0:bigint
                         Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: bigint)
             Execution mode: vectorized, llap
@@ -738,6 +967,12 @@ STAGE PLANS:
                 allNative: false
                 usesVectorUDFAdaptor: true
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0, 1]
+                    dataColumns: key:string, value:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
         Map 4 
            Map Operator Tree:
                 TableScan
@@ -746,6 +981,7 @@ STAGE PLANS:
                   Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct]
                   Filter Operator
                     Filter Vectorization:
                         className: VectorFilterOperator
@@ -767,6 +1003,7 @@ STAGE PLANS:
                      Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
+                          keyColumns: 0:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 475 Data size: 83204 Basic stats: COMPLETE Column stats: NONE
@@ -781,14 +1018,27 @@ STAGE PLANS:
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0]
+                    dataColumns: key:string, value:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
         Reducer 3 
             Execution mode: vectorized, llap
             Reduce Vectorization:
                 enabled: true
                 enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: 
+                reduceColumnSortOrder: 
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 1
+                    dataColumns: VALUE._col0:bigint
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
             Reduce Operator Tree:
              Group By Operator
                 aggregations: sum(VALUE._col0)
@@ -819,32 +1069,7 @@ STAGE PLANS:
       Processor Tree:
        ListSink
 
-PREHOOK: query: FROM
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
-JOIN
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y
-ON (x.key = Y.key)
-JOIN
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z
-ON (x.key = Z.key)
-select sum(hash(Y.key,Y.value))
-PREHOOK: type: QUERY
-PREHOOK: Input: default@orcsrc_n0
-#### A masked pattern was here ####
-POSTHOOK: query: FROM
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
-JOIN
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y
-ON (x.key = Y.key)
-JOIN
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z
-ON (x.key = Z.key)
-select sum(hash(Y.key,Y.value))
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@orcsrc_n0
-#### A masked pattern was here ####
-348019368476
-PREHOOK: query: explain vectorization expression
+PREHOOK: query: explain vectorization detail
 FROM
 (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
 JOIN
@@ -855,7 +1080,7 @@ LEFT OUTER JOIN
 ON (x.key = Z.key)
 select sum(hash(Y.key,Y.value))
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization expression
+POSTHOOK: query: explain vectorization detail
 FROM
 (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
 JOIN
@@ -890,6 +1115,7 @@ STAGE PLANS:
                   Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct]
                   Select Operator
                     expressions: key (type: string)
                     outputColumnNames: _col0
@@ -904,6 +1130,7 @@ STAGE PLANS:
                      Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
+                          keyColumns: 0:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE
@@ -918,6 +1145,12 @@ STAGE PLANS:
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0]
+                    dataColumns: key:string, value:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
         Map 4 
            Map Operator Tree:
                 TableScan
@@ -925,6 +1158,7 @@ STAGE PLANS:
                   Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct]
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -939,8 +1173,10 @@ STAGE PLANS:
                      Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
+                          keyColumns: 0:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                          valueColumns: 1:string
                       Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col1 (type: string)
             Execution mode: vectorized, llap
@@ -954,6 +1190,12 @@ STAGE PLANS:
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0, 1]
+                    dataColumns: key:string, value:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
         Map 5 
            Map Operator Tree:
                 TableScan
@@ -961,6 +1203,7 @@ STAGE PLANS:
                   Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct]
                   Select Operator
                     expressions: key (type: string)
                     outputColumnNames: _col0
@@ -975,6 +1218,7 @@ STAGE PLANS:
                      Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
+                          keyColumns: 0:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE
@@ -989,6 +1233,12 @@ STAGE PLANS:
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0]
+                    dataColumns: key:string, value:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -1011,14 +1261,24 @@
                     sort order: 
                     Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col0 (type: bigint)
+            MergeJoin Vectorization:
+                enabled: false
+                enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
        Reducer 3 
             Execution mode: vectorized, llap
             Reduce Vectorization:
                 enabled: true
                 enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: 
+                reduceColumnSortOrder: 
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 1
+                    dataColumns: VALUE._col0:bigint
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
             Reduce Operator Tree:
              Group By Operator
                 aggregations: sum(VALUE._col0)
@@ -1049,32 +1309,7 @@ STAGE PLANS:
       Processor Tree:
        ListSink
 
-PREHOOK: query: FROM
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
-JOIN
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y
-ON (x.key = Y.key)
-LEFT OUTER JOIN
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z
-ON (x.key = Z.key)
-select sum(hash(Y.key,Y.value))
-PREHOOK: type: QUERY
-PREHOOK: Input: default@orcsrc_n0
-#### A masked pattern was here ####
-POSTHOOK: query: FROM
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
-JOIN
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y
-ON (x.key = Y.key)
-LEFT OUTER JOIN
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z
-ON (x.key = Z.key)
-select sum(hash(Y.key,Y.value))
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@orcsrc_n0
-#### A masked pattern was here ####
-348019368476
-PREHOOK: query: explain vectorization expression
+PREHOOK: query: explain vectorization detail
 FROM
 (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
 LEFT OUTER JOIN
@@ -1085,7 +1320,7 @@ LEFT OUTER JOIN
 ON (x.key = Z.key)
 select sum(hash(Y.key,Y.value))
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization expression
+POSTHOOK: query: explain vectorization detail
 FROM
 (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
 LEFT OUTER JOIN
@@ -1120,6 +1355,7 @@ STAGE PLANS:
                   Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct]
                   Select Operator
                     expressions: key (type: string)
                     outputColumnNames: _col0
@@ -1134,6 +1370,7 @@ STAGE PLANS:
                      Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
+                          keyColumns: 0:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE
@@ -1148,6 +1385,12 @@ STAGE PLANS:
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0]
+                    dataColumns: key:string, value:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
         Map 4 
            Map Operator Tree:
                 TableScan
@@ -1155,6 +1398,7 @@ STAGE PLANS:
                   Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct]
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -1169,8 +1413,10 @@ STAGE PLANS:
                      Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
+                          keyColumns: 0:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                          valueColumns: 1:string
                       Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col1 (type: string)
             Execution mode: vectorized, llap
@@ -1184,6 +1430,12 @@ STAGE PLANS:
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0, 1]
+                    dataColumns: key:string, value:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
         Map 5 
            Map Operator Tree:
                 TableScan
@@ -1191,6 +1443,7 @@ STAGE PLANS:
                   Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct]
                   Select Operator
                     expressions: key (type: string)
                     outputColumnNames: _col0
@@ -1205,6 +1458,7 @@ STAGE PLANS:
                      Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
+                          keyColumns: 0:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE
@@ -1219,6 +1473,12 @@ STAGE PLANS:
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0]
+                    dataColumns: key:string, value:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -1241,14 +1501,24 @@
                     sort order: 
                     Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col0 (type: bigint)
+            MergeJoin Vectorization:
+                enabled: false
+                enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
        Reducer 3 
             Execution mode: vectorized, llap
             Reduce Vectorization:
                 enabled: true
                 enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: 
+                reduceColumnSortOrder: 
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 1
+                    dataColumns: VALUE._col0:bigint
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
             Reduce Operator Tree:
              Group By Operator
                 aggregations: sum(VALUE._col0)
@@ -1279,32 +1549,7 @@ STAGE PLANS:
       Processor Tree:
        ListSink
 
-PREHOOK: query: FROM
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
-LEFT OUTER JOIN
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y
-ON (x.key = Y.key)
-LEFT OUTER JOIN
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z
-ON (x.key = Z.key)
-select sum(hash(Y.key,Y.value))
-PREHOOK: type: QUERY
-PREHOOK: Input: default@orcsrc_n0
-#### A masked pattern was here ####
-POSTHOOK: query: FROM
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
-LEFT OUTER JOIN
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y
-ON (x.key = Y.key)
-LEFT OUTER JOIN
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z
-ON (x.key = Z.key)
-select sum(hash(Y.key,Y.value))
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@orcsrc_n0
-#### A masked pattern was here ####
-348019368476
-PREHOOK: query: explain vectorization expression
+PREHOOK: query: explain vectorization detail
 FROM
 (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
 LEFT OUTER JOIN
@@ -1315,7 +1560,7 @@ RIGHT OUTER JOIN
 ON (x.key = Z.key)
 select sum(hash(Y.key,Y.value))
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization expression
+POSTHOOK: query: explain vectorization detail
 FROM
 (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
 LEFT OUTER JOIN
@@ -1350,6 +1595,7 @@ STAGE PLANS:
                   Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct]
                   Select Operator
                     expressions: key (type: string)
                     outputColumnNames: _col0
@@ -1364,6 +1610,7 @@ STAGE PLANS:
                      Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
+                          keyColumns: 0:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE
@@ -1378,6 +1625,12 @@ STAGE PLANS:
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0]
+                    dataColumns: key:string, value:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
         Map 4 
            Map Operator Tree:
                 TableScan
@@ -1385,6 +1638,7 @@ STAGE PLANS:
                   Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct]
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
@@ -1399,8 +1653,10 @@ STAGE PLANS:
                      Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
+                          keyColumns: 0:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                          valueColumns: 1:string
                       Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col1 (type: string)
             Execution mode: vectorized, llap
@@ -1414,6 +1670,12 @@ STAGE PLANS:
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0, 1]
+                    dataColumns: key:string, value:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
         Map 5 
            Map Operator Tree:
                 TableScan
@@ -1421,6 +1683,7 @@ STAGE PLANS:
                   Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
+                      vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct]
                   Select Operator
                     expressions: key (type: string)
                     outputColumnNames: _col0
@@ -1435,6 +1698,7 @@ STAGE PLANS:
                      Map-reduce partition columns: _col0 (type: string)
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkStringOperator
+                          keyColumns: 0:string
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE
@@ -1449,6 +1713,12 @@ STAGE PLANS:
                 allNative: true
                 usesVectorUDFAdaptor: false
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0]
+                    dataColumns: key:string, value:string
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
         Reducer 2 
             Execution mode: llap
             Reduce Operator Tree:
@@ -1471,14 +1741,24 @@
                     sort order: 
                     Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col0 (type: bigint)
+            MergeJoin Vectorization:
+                enabled: false
+                enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
        Reducer 3 
             Execution mode: vectorized, llap
             Reduce Vectorization:
                 enabled: true
                 enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: 
+                reduceColumnSortOrder: 
                 allNative: false
                 usesVectorUDFAdaptor: false
                 vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 1
+                    dataColumns: VALUE._col0:bigint
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
             Reduce Operator Tree:
              Group By Operator
                 aggregations: sum(VALUE._col0)
@@ -1509,32 +1789,7 @@ STAGE PLANS:
       Processor Tree:
        ListSink
 
-PREHOOK: query: FROM
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
-LEFT OUTER JOIN
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y
-ON (x.key = Y.key)
-RIGHT OUTER JOIN
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z
-ON (x.key = Z.key)
-select sum(hash(Y.key,Y.value))
-PREHOOK: type: QUERY
-PREHOOK: Input: default@orcsrc_n0
-#### A masked pattern was here ####
-POSTHOOK: query: FROM
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
-LEFT OUTER JOIN
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y
-ON (x.key = Y.key)
-RIGHT OUTER JOIN
-(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z
-ON (x.key = Z.key)
-select sum(hash(Y.key,Y.value))
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@orcsrc_n0
-#### A masked pattern was here ####
-348019368476
-PREHOOK: query: explain vectorization expression
+PREHOOK: query: explain vectorization detail
 FROM
 (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x
 RIGHT OUTER JOIN
@@ -1545,7 +1800,7 @@ RIGHT OUTER JOIN
 ON (x.key = Z.key)
 select sum(hash(Y.key,Y.value))
 PREHOOK: type: QUERY
-POSTHOOK: 
query: explain vectorization expression +POSTHOOK: query: explain vectorization detail FROM (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x RIGHT OUTER JOIN @@ -1580,6 +1835,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true + vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct] Select Operator expressions: key (type: string) outputColumnNames: _col0 @@ -1594,6 +1850,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE @@ -1608,6 +1865,12 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0] + dataColumns: key:string, value:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] Map 4 Map Operator Tree: TableScan @@ -1615,6 +1878,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true + vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct] Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 @@ -1629,8 +1893,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:string Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Execution mode: vectorized, llap @@ -1644,6 +1910,12 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:string, value:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] Map 5 Map Operator Tree: TableScan @@ -1651,6 +1923,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true + vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct] Select Operator expressions: key (type: string) outputColumnNames: _col0 @@ -1665,6 +1938,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE @@ -1679,6 +1953,12 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0] + dataColumns: 
key:string, value:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] Reducer 2 Execution mode: llap Reduce Operator Tree: @@ -1701,14 +1981,24 @@ STAGE PLANS: sort order: Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: + reduceColumnSortOrder: allNative: false usesVectorUDFAdaptor: false vectorized: true + rowBatchContext: + dataColumnCount: 1 + dataColumns: VALUE._col0:bigint + partitionColumnCount: 0 + scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0) @@ -1739,28 +2029,1443 @@ STAGE PLANS: Processor Tree: ListSink -PREHOOK: query: FROM +PREHOOK: query: explain vectorization detail +FROM (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x -RIGHT OUTER JOIN +JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y ON (x.key = Y.key) -RIGHT OUTER JOIN +FULL OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z ON (x.key = Z.key) select sum(hash(Y.key,Y.value)) PREHOOK: type: QUERY -PREHOOK: Input: default@orcsrc_n0 -#### A masked pattern was here #### -POSTHOOK: query: FROM +POSTHOOK: query: explain vectorization detail +FROM (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x -RIGHT OUTER JOIN +JOIN +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +ON (x.key = Y.key) +FULL OUTER JOIN +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +ON (x.key = Z.key) +select sum(hash(Y.key,Y.value)) +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: orcsrc_n0 + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct] + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + 
inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0] + dataColumns: key:string, value:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 4 + Map Operator Tree: + TableScan + alias: orcsrc_n0 + Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct] + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:string + Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:string, value:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 5 + Map Operator Tree: + TableScan + alias: orcsrc_n0 + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct] + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0] + dataColumns: key:string, value:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Inner Join 0 to 1 + Full Outer Join 0 to 2 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + 2 _col0 (type: string) + outputColumnNames: _col2, _col3 + Statistics: Num rows: 1100 Data size: 192684 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(hash(_col2,_col3)) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: + reduceColumnSortOrder: + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + dataColumns: VALUE._col0:bigint + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By Vectorization: + aggregators: VectorUDAFSumLong(col 0:bigint) -> bigint + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: GLOBAL + projectedOutputColumnNums: [0] + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain vectorization detail +FROM +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +FULL OUTER JOIN +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +ON (x.key = Y.key) +FULL OUTER JOIN +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +ON (x.key = Z.key) +select sum(hash(Y.key,Y.value)) +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +FROM +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +FULL OUTER JOIN +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +ON (x.key = Y.key) +FULL OUTER JOIN +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +ON (x.key = Z.key) +select sum(hash(Y.key,Y.value)) +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was 
here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: orcsrc_n0 + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct] + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0] + dataColumns: key:string, value:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 4 + Map Operator Tree: + TableScan + alias: orcsrc_n0 + Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct] + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:string + Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:string, value:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 5 + Map Operator Tree: + TableScan + alias: orcsrc_n0 + Statistics: Num rows: 500 Data size: 
87584 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct] + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0] + dataColumns: key:string, value:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + Full Outer Join 0 to 2 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + 2 _col0 (type: string) + outputColumnNames: _col2, _col3 + Statistics: Num rows: 1100 Data size: 192684 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(hash(_col2,_col3)) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: + reduceColumnSortOrder: + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + dataColumns: VALUE._col0:bigint + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By Vectorization: + aggregators: VectorUDAFSumLong(col 0:bigint) -> bigint + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: GLOBAL + projectedOutputColumnNums: [0] + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain vectorization detail +FROM +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +FULL OUTER JOIN +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +ON (x.key = Y.key) +LEFT OUTER JOIN +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +ON (x.key = Z.key) +select sum(hash(Y.key,Y.value)) +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +FROM +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +FULL OUTER JOIN +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +ON (x.key = Y.key) +LEFT OUTER JOIN +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +ON (x.key = Z.key) +select sum(hash(Y.key,Y.value)) +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: orcsrc_n0 + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct] + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0] + dataColumns: key:string, value:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 4 + Map Operator Tree: + TableScan + alias: orcsrc_n0 + Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct] + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: 
NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:string + Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:string, value:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 5 + Map Operator Tree: + TableScan + alias: orcsrc_n0 + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct] + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0] + dataColumns: key:string, value:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + Left Outer Join 0 to 2 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + 2 _col0 (type: string) + outputColumnNames: _col2, _col3 + Statistics: Num rows: 1100 Data size: 192684 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(hash(_col2,_col3)) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: 
bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: + reduceColumnSortOrder: + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + dataColumns: VALUE._col0:bigint + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By Vectorization: + aggregators: VectorUDAFSumLong(col 0:bigint) -> bigint + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: GLOBAL + projectedOutputColumnNums: [0] + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain vectorization detail +FROM +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +LEFT OUTER JOIN +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +ON (x.key = Y.key) +FULL OUTER JOIN +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +ON (x.key = Z.key) +select sum(hash(Y.key,Y.value)) +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +FROM +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +LEFT OUTER JOIN +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +ON (x.key = Y.key) +FULL OUTER JOIN +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +ON (x.key = Z.key) +select sum(hash(Y.key,Y.value)) +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: orcsrc_n0 + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct] + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS 
true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0] + dataColumns: key:string, value:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 4 + Map Operator Tree: + TableScan + alias: orcsrc_n0 + Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct] + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:string + Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:string, value:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 5 + Map Operator Tree: + TableScan + alias: orcsrc_n0 + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct] + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: 
Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0] + dataColumns: key:string, value:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Left Outer Join 0 to 1 + Full Outer Join 0 to 2 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + 2 _col0 (type: string) + outputColumnNames: _col2, _col3 + Statistics: Num rows: 1100 Data size: 192684 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(hash(_col2,_col3)) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: + reduceColumnSortOrder: + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + dataColumns: VALUE._col0:bigint + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By Vectorization: + aggregators: VectorUDAFSumLong(col 0:bigint) -> bigint + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: GLOBAL + projectedOutputColumnNums: [0] + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain vectorization detail +FROM +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +FULL OUTER JOIN +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +ON (x.key = Y.key) +RIGHT OUTER JOIN +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +ON (x.key = Z.key) +select sum(hash(Y.key,Y.value)) +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +FROM +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +FULL OUTER JOIN +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +ON (x.key = Y.key) +RIGHT OUTER JOIN +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +ON (x.key = Z.key) +select sum(hash(Y.key,Y.value)) +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: 
[hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: orcsrc_n0 + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct] + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0] + dataColumns: key:string, value:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 4 + Map Operator Tree: + TableScan + alias: orcsrc_n0 + Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct] + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:string + Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:string, value:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 5 + Map Operator Tree: + TableScan + alias: orcsrc_n0 + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct] + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0] + dataColumns: key:string, value:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + Right Outer Join 0 to 2 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + 2 _col0 (type: string) + outputColumnNames: _col2, _col3 + Statistics: Num rows: 1100 Data size: 192684 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(hash(_col2,_col3)) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: + reduceColumnSortOrder: + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 1 + dataColumns: VALUE._col0:bigint + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By Vectorization: + aggregators: VectorUDAFSumLong(col 0:bigint) -> bigint + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: GLOBAL + projectedOutputColumnNums: [0] + mode: mergepartial + outputColumnNames: _col0 + 
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: explain vectorization detail +FROM +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x +RIGHT OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y ON (x.key = Y.key) +FULL OUTER JOIN +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z +ON (x.key = Z.key) +select sum(hash(Y.key,Y.value)) +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization detail +FROM +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by key) x RIGHT OUTER JOIN +(SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Y +ON (x.key = Y.key) +FULL OUTER JOIN (SELECT orcsrc_n0.* FROM orcsrc_n0 sort by value) Z ON (x.key = Z.key) select sum(hash(Y.key,Y.value)) POSTHOOK: type: QUERY -POSTHOOK: Input: default@orcsrc_n0 +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE) #### A masked pattern was here #### -348019368476 + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: orcsrc_n0 + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct] + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0] + dataColumns: key:string, value:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 4 + Map Operator Tree: + TableScan + alias: orcsrc_n0 + Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE + 
TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct] + Select Operator + expressions: key (type: string), value (type: string) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1] + Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:string + Statistics: Num rows: 500 Data size: 175168 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: string) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:string, value:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 5 + Map Operator Tree: + TableScan + alias: orcsrc_n0 + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:string, 1:value:string, 2:ROW__ID:struct] + Select Operator + expressions: key (type: string) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0] + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkStringOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 500 Data size: 87584 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0] + dataColumns: key:string, value:string + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Right Outer Join 0 to 1 + Full Outer Join 0 to 2 + keys: + 0 _col0 (type: string) + 1 _col0 (type: string) + 2 _col0 (type: string) + outputColumnNames: _col2, _col3 + 
+                Statistics: Num rows: 1100 Data size: 192684 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: sum(hash(_col2,_col3))
+                  mode: hash
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    sort order: 
+                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col0 (type: bigint)
+            MergeJoin Vectorization:
+                enabled: false
+                enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
+        Reducer 3
+            Execution mode: vectorized, llap
+            Reduce Vectorization:
+                enabled: true
+                enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: 
+                reduceColumnSortOrder: 
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 1
+                    dataColumns: VALUE._col0:bigint
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(VALUE._col0)
+                Group By Vectorization:
+                    aggregators: VectorUDAFSumLong(col 0:bigint) -> bigint
+                    className: VectorGroupByOperator
+                    groupByMode: MERGEPARTIAL
+                    native: false
+                    vectorProcessingMode: GLOBAL
+                    projectedOutputColumnNums: [0]
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  File Sink Vectorization:
+                      className: VectorFileSinkOperator
+                      native: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
diff --git ql/src/test/results/clientpositive/llap/vector_join_filters.q.out ql/src/test/results/clientpositive/llap/vector_join_filters.q.out
index 7c1780bf84..a49e8e26a7 100644
--- ql/src/test/results/clientpositive/llap/vector_join_filters.q.out
+++ ql/src/test/results/clientpositive/llap/vector_join_filters.q.out
@@ -47,15 +47,174 @@ POSTHOOK: Input: default@myinput1_n1
#### A masked pattern was here ####
4937935
Warning: Map Join MAPJOIN[16][bigTable=?] in task 'Map 2' is a cross product
-PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value
+PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR
+SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value
PREHOOK: type: QUERY
-PREHOOK: Input: default@myinput1_n1
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value
+POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR
+SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value
POSTHOOK: type: QUERY
-POSTHOOK: Input: default@myinput1_n1
-#### A masked pattern was here ####
-3080335
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 2 <- Map 1 (BROADCAST_EDGE)
+        Reducer 3 <- Map 2 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: a
+                  filterExpr: ((key > 40) and (value > 50) and (key = value)) (type: boolean)
+                  Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                  Filter Operator
+                    Filter Vectorization:
+                        className: VectorFilterOperator
+                        native: true
+                    predicate: ((key = value) and (key > 40) and (value > 50)) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: int), value (type: int)
+                      outputColumnNames: _col0, _col1
+                      Select Vectorization:
+                          className: VectorSelectOperator
+                          native: true
+                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Reduce Sink Vectorization:
+                            className: VectorReduceSinkEmptyKeyOperator
+                            native: true
+                            nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: int), _col1 (type: int)
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
+        Map 2
+            Map Operator Tree:
+                TableScan
+                  alias: b
+                  Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                  Select Operator
+                    expressions: key (type: int), value (type: int)
+                    outputColumnNames: _col0, _col1
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
+                    Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Right Outer Join 0 to 1
+                      filter predicates:
+                        0 
+                        1 {(_col0 > 40)} {(_col1 > 50)} {(_col0 = _col1)}
+                      keys:
+                        0 
+                        1 
+                      Map Join Vectorization:
+                          className: VectorMapJoinOuterFilteredOperator
+                          native: false
+                          nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
+                          nativeConditionsNotMet: Outer Join has keys IS false
+                      outputColumnNames: _col0, _col1, _col2, _col3
+                      input vertices:
+                        0 Map 1
+                      Statistics: Num rows: 4 Data size: 68 Basic stats: COMPLETE Column stats: NONE
+                      Select Operator
+                        expressions: hash(_col0,_col1,_col2,_col3) (type: int)
+                        outputColumnNames: _col0
+                        Select Vectorization:
+                            className: VectorSelectOperator
+                            native: true
+                            selectExpressions: VectorUDFAdaptor(hash(_col0,_col1,_col2,_col3)) -> 4:int
+                        Statistics: Num rows: 4 Data size: 68 Basic stats: COMPLETE Column stats: NONE
+                        Group By Operator
+                          aggregations: sum(_col0)
+                          Group By Vectorization:
+                              className: VectorGroupByOperator
+                              groupByMode: HASH
+                              native: false
+                              vectorProcessingMode: HASH
+                          mode: hash
+                          outputColumnNames: _col0
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                          Reduce Output Operator
+                            sort order: 
+                            Reduce Sink Vectorization:
+                                className: VectorReduceSinkEmptyKeyOperator
+                                native: true
+                                nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                            value expressions: _col0 (type: bigint)
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: false
+                usesVectorUDFAdaptor: true
+                vectorized: true
+        Reducer 3
+            Execution mode: vectorized, llap
+            Reduce Vectorization:
+                enabled: true
+                enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(VALUE._col0)
+                Group By Vectorization:
+                    className: VectorGroupByOperator
+                    groupByMode: MERGEPARTIAL
+                    native: false
+                    vectorProcessingMode: GLOBAL
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  File Sink Vectorization:
+                      className: VectorFileSinkOperator
+                      native: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
PREHOOK: query: SELECT
sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a JOIN myinput1_n1 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1_n1 @@ -128,42 +287,681 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1_n1 #### A masked pattern was here #### 4937935 -PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value PREHOOK: type: QUERY -PREHOOK: Input: default@myinput1_n1 -#### A masked pattern was here #### -POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value POSTHOOK: type: QUERY -POSTHOOK: Input: default@myinput1_n1 -#### A masked pattern was here #### -3080335 -PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Map 2 <- Map 1 (BROADCAST_EDGE) + Reducer 3 <- Map 2 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + filterExpr: ((key > 40) and (value > 50) and (key = value)) (type: boolean) + Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicate: ((key = value) and (key > 40) and (value > 50)) (type: boolean) + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: 
_col1 (type: int) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Map 2 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Right Outer Join 0 to 1 + filter predicates: + 0 + 1 {(_col0 > 40)} {(_col1 > 50)} {(_col0 = _col1)} + keys: + 0 _col0 (type: int) + 1 _col1 (type: int) + Map Join Vectorization: + className: VectorMapJoinOuterLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0, _col1, _col2, _col3 + input vertices: + 0 Map 1 + Statistics: Num rows: 4 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: hash(_col0,_col1,_col2,_col3) (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + selectExpressions: VectorUDFAdaptor(hash(_col0,_col1,_col2,_col3)) -> 5:int + Statistics: Num rows: 4 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: true + vectorized: true + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: 
MERGEPARTIAL + native: false + vectorProcessingMode: GLOBAL + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value PREHOOK: type: QUERY -PREHOOK: Input: default@myinput1_n1 -#### A masked pattern was here #### -POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value POSTHOOK: type: QUERY -POSTHOOK: Input: default@myinput1_n1 -#### A masked pattern was here #### -3080335 -PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Map 2 <- Map 1 (BROADCAST_EDGE) + Reducer 3 <- Map 2 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + filterExpr: ((key > 40) and (value > 50) and (key = value)) (type: boolean) + Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicate: ((key = value) and (key > 40) and (value > 50)) (type: boolean) + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 8 
Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: int) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Map 2 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Right Outer Join 0 to 1 + filter predicates: + 0 + 1 {(_col0 > 40)} {(_col1 > 50)} {(_col0 = _col1)} + keys: + 0 _col0 (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + className: VectorMapJoinOuterLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0, _col1, _col2, _col3 + input vertices: + 0 Map 1 + Statistics: Num rows: 4 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: hash(_col0,_col1,_col2,_col3) (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + selectExpressions: VectorUDFAdaptor(hash(_col0,_col1,_col2,_col3)) -> 5:int + Statistics: Num rows: 4 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: true + vectorized: true + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By 
Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: GLOBAL + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value PREHOOK: type: QUERY -PREHOOK: Input: default@myinput1_n1 -#### A masked pattern was here #### -POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value POSTHOOK: type: QUERY -POSTHOOK: Input: default@myinput1_n1 -#### A masked pattern was here #### -3080335 -PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.key=b.key and a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Map 2 <- Map 1 (BROADCAST_EDGE) + Reducer 3 <- Map 2 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + filterExpr: ((key > 40) and (value > 50) and (key = value)) (type: boolean) + Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicate: ((key = value) and (key > 40) and (value > 50)) (type: boolean) + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for 
keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Map 2 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Right Outer Join 0 to 1 + filter predicates: + 0 + 1 {(_col0 > 40)} {(_col1 > 50)} {(_col0 = _col1)} + keys: + 0 _col1 (type: int) + 1 _col1 (type: int) + Map Join Vectorization: + className: VectorMapJoinOuterLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0, _col1, _col2, _col3 + input vertices: + 0 Map 1 + Statistics: Num rows: 4 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: hash(_col0,_col1,_col2,_col3) (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + selectExpressions: VectorUDFAdaptor(hash(_col0,_col1,_col2,_col3)) -> 5:int + Statistics: Num rows: 4 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: true + vectorized: true + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce 
Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: GLOBAL + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.key=b.key and a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value PREHOOK: type: QUERY -PREHOOK: Input: default@myinput1_n1 -#### A masked pattern was here #### -POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.key=b.key and a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n1 a RIGHT OUTER JOIN myinput1_n1 b ON a.key=b.key and a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value POSTHOOK: type: QUERY -POSTHOOK: Input: default@myinput1_n1 -#### A masked pattern was here #### -3080335 +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Map 2 <- Map 1 (BROADCAST_EDGE) + Reducer 3 <- Map 2 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + filterExpr: ((key > 40) and (value > 50) and (key = value)) (type: boolean) + Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicate: ((key = value) and (key > 40) and (value > 50)) (type: boolean) + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int), _col1 (type: int) + sort order: ++ + Map-reduce partition columns: _col0 (type: int), _col1 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data 
size: 8 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Map 2 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 4 Data size: 32 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Right Outer Join 0 to 1 + filter predicates: + 0 + 1 {(_col0 > 40)} {(_col1 > 50)} {(_col0 = _col1)} + keys: + 0 _col0 (type: int), _col1 (type: int) + 1 _col0 (type: int), _col1 (type: int) + Map Join Vectorization: + className: VectorMapJoinOuterMultiKeyOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0, _col1, _col2, _col3 + input vertices: + 0 Map 1 + Statistics: Num rows: 4 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: hash(_col0,_col1,_col2,_col3) (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + selectExpressions: VectorUDFAdaptor(hash(_col0,_col1,_col2,_col3)) -> 5:int + Statistics: Num rows: 4 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: true + vectorized: true + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By 
Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: GLOBAL + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) from myinput1_n1 a LEFT OUTER JOIN myinput1_n1 b ON (a.value=b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value) RIGHT OUTER JOIN myinput1_n1 c ON (b.value=c.value AND c.key > 40 AND c.value > 50 AND c.key = c.value AND b.key > 40 AND b.value > 50 AND b.key = b.value) PREHOOK: type: QUERY PREHOOK: Input: default@myinput1_n1 diff --git ql/src/test/results/clientpositive/llap/vector_join_nulls.q.out ql/src/test/results/clientpositive/llap/vector_join_nulls.q.out index 12db0367bb..b8d76ed10a 100644 --- ql/src/test/results/clientpositive/llap/vector_join_nulls.q.out +++ ql/src/test/results/clientpositive/llap/vector_join_nulls.q.out @@ -47,15 +47,167 @@ POSTHOOK: Input: default@myinput1_n4 #### A masked pattern was here #### 13630578 Warning: Map Join MAPJOIN[14][bigTable=?] in task 'Map 2' is a cross product -PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b PREHOOK: type: QUERY -PREHOOK: Input: default@myinput1_n4 -#### A masked pattern was here #### -POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b POSTHOOK: type: QUERY -POSTHOOK: Input: default@myinput1_n4 -#### A masked pattern was here #### -13630578 +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Map 2 <- Map 1 (BROADCAST_EDGE) + Reducer 3 <- Map 2 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, 
BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col1 (type: int) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Map 2 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Right Outer Join 0 to 1 + filter predicates: + 0 + 1 {true} + keys: + 0 + 1 + Map Join Vectorization: + className: VectorMapJoinOuterFilteredOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: Outer Join has keys IS false + outputColumnNames: _col0, _col1, _col2, _col3 + input vertices: + 0 Map 1 + Statistics: Num rows: 9 Data size: 153 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: hash(_col0,_col1,_col2,_col3) (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + selectExpressions: VectorUDFAdaptor(hash(_col0,_col1,_col2,_col3)) -> 4:int + Statistics: Num rows: 9 Data size: 153 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: true + vectorized: true + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce 
Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: GLOBAL + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a JOIN myinput1_n4 b ON a.key = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1_n4 @@ -128,42 +280,643 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1_n4 #### A masked pattern was here #### 4542003 -PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.key = b.value +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.key = b.value PREHOOK: type: QUERY -PREHOOK: Input: default@myinput1_n4 -#### A masked pattern was here #### -POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.key = b.value +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.key = b.value POSTHOOK: type: QUERY -POSTHOOK: Input: default@myinput1_n4 -#### A masked pattern was here #### -3079923 -PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.key = b.key +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Map 2 <- Map 1 (BROADCAST_EDGE) + Reducer 3 <- Map 2 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: int) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + 
enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Map 2 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Right Outer Join 0 to 1 + keys: + 0 _col0 (type: int) + 1 _col1 (type: int) + Map Join Vectorization: + className: VectorMapJoinOuterLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0, _col1, _col2, _col3 + input vertices: + 0 Map 1 + Statistics: Num rows: 3 Data size: 26 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: hash(_col0,_col1,_col2,_col3) (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + selectExpressions: VectorUDFAdaptor(hash(_col0,_col1,_col2,_col3)) -> 5:int + Statistics: Num rows: 3 Data size: 26 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: true + vectorized: true + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: GLOBAL + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: 
NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.key = b.key PREHOOK: type: QUERY -PREHOOK: Input: default@myinput1_n4 -#### A masked pattern was here #### -POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.key = b.key +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.key = b.key POSTHOOK: type: QUERY -POSTHOOK: Input: default@myinput1_n4 -#### A masked pattern was here #### -4509891 -PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.value = b.value +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Map 2 <- Map 1 (BROADCAST_EDGE) + Reducer 3 <- Map 2 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: int) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Map 2 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition 
map: + Right Outer Join 0 to 1 + keys: + 0 _col0 (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + className: VectorMapJoinOuterLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0, _col1, _col2, _col3 + input vertices: + 0 Map 1 + Statistics: Num rows: 3 Data size: 26 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: hash(_col0,_col1,_col2,_col3) (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + selectExpressions: VectorUDFAdaptor(hash(_col0,_col1,_col2,_col3)) -> 5:int + Statistics: Num rows: 3 Data size: 26 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: true + vectorized: true + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: GLOBAL + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.value = b.value PREHOOK: type: QUERY -PREHOOK: Input: default@myinput1_n4 -#### A masked pattern was here #### 
-POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.value = b.value +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.value = b.value POSTHOOK: type: QUERY -POSTHOOK: Input: default@myinput1_n4 -#### A masked pattern was here #### -3113558 -PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.key=b.key and a.value = b.value +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Map 2 <- Map 1 (BROADCAST_EDGE) + Reducer 3 <- Map 2 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Map 2 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Right Outer Join 0 to 1 + keys: + 0 _col1 (type: int) + 1 _col1 (type: int) + Map Join Vectorization: + className: VectorMapJoinOuterLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0, _col1, _col2, _col3 + input vertices: + 0 Map 1 + Statistics: Num rows: 3 Data size: 26 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: hash(_col0,_col1,_col2,_col3) (type: 
int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + selectExpressions: VectorUDFAdaptor(hash(_col0,_col1,_col2,_col3)) -> 5:int + Statistics: Num rows: 3 Data size: 26 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: true + vectorized: true + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: GLOBAL + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR + +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.key=b.key and a.value = b.value PREHOOK: type: QUERY -PREHOOK: Input: default@myinput1_n4 -#### A masked pattern was here #### -POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.key=b.key and a.value = b.value +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR + +SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n4 a RIGHT OUTER JOIN myinput1_n4 b ON a.key=b.key and a.value = b.value POSTHOOK: type: QUERY -POSTHOOK: Input: default@myinput1_n4 -#### A masked pattern was here #### -3079923 +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + 
Map 2 <- Map 1 (BROADCAST_EDGE) + Reducer 3 <- Map 2 (CUSTOM_SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int), _col1 (type: int) + sort order: ++ + Map-reduce partition columns: _col0 (type: int), _col1 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkMultiKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Map 2 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Right Outer Join 0 to 1 + keys: + 0 _col0 (type: int), _col1 (type: int) + 1 _col0 (type: int), _col1 (type: int) + Map Join Vectorization: + className: VectorMapJoinOuterMultiKeyOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0, _col1, _col2, _col3 + input vertices: + 0 Map 1 + Statistics: Num rows: 3 Data size: 26 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: hash(_col0,_col1,_col2,_col3) (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + selectExpressions: VectorUDFAdaptor(hash(_col0,_col1,_col2,_col3)) -> 5:int + Statistics: Num rows: 3 Data size: 26 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine 
tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: true + vectorized: true + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: MERGEPARTIAL + native: false + vectorProcessingMode: GLOBAL + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) from myinput1_n4 a LEFT OUTER JOIN myinput1_n4 b ON (a.value=b.value) RIGHT OUTER JOIN myinput1_n4 c ON (b.value=c.value) PREHOOK: type: QUERY PREHOOK: Input: default@myinput1_n4 diff --git ql/src/test/results/clientpositive/llap/vector_left_outer_join2.q.out ql/src/test/results/clientpositive/llap/vector_left_outer_join2.q.out index 55be9102ae..24386038bd 100644 --- ql/src/test/results/clientpositive/llap/vector_left_outer_join2.q.out +++ ql/src/test/results/clientpositive/llap/vector_left_outer_join2.q.out @@ -65,6 +65,7 @@ POSTHOOK: Output: default@tjoin1 POSTHOOK: Lineage: tjoin1.c1 SIMPLE [(tjoin1stage)tjoin1stage.FieldSchema(name:c1, type:int, comment:null), ] POSTHOOK: Lineage: tjoin1.c2 EXPRESSION [(tjoin1stage)tjoin1stage.FieldSchema(name:c2, type:char(2), comment:null), ] POSTHOOK: Lineage: tjoin1.rnum SIMPLE [(tjoin1stage)tjoin1stage.FieldSchema(name:rnum, type:int, comment:null), ] +_col0 _col1 _col2 PREHOOK: query: INSERT INTO TABLE TJOIN2 SELECT * from TJOIN2STAGE PREHOOK: type: QUERY PREHOOK: Input: default@tjoin2stage @@ -76,12 +77,14 @@ POSTHOOK: Output: default@tjoin2 POSTHOOK: Lineage: tjoin2.c1 SIMPLE [(tjoin2stage)tjoin2stage.FieldSchema(name:c1, type:int, comment:null), ] POSTHOOK: Lineage: tjoin2.c2 SIMPLE [(tjoin2stage)tjoin2stage.FieldSchema(name:c2, type:char(2), comment:null), ] POSTHOOK: Lineage: tjoin2.rnum SIMPLE [(tjoin2stage)tjoin2stage.FieldSchema(name:rnum, type:int, comment:null), ] -PREHOOK: query: explain vectorization expression +tjoin2stage.rnum tjoin2stage.c1 tjoin2stage.c2 +PREHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 
) PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization expression +POSTHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) POSTHOOK: type: QUERY +Explain PLAN VECTORIZATION: enabled: false enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] @@ -167,15 +170,21 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@tjoin1 POSTHOOK: Input: default@tjoin2 #### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c2j2 0 10 15 NULL 1 20 25 NULL 2 NULL 50 NULL -PREHOOK: query: explain +PREHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) PREHOOK: type: QUERY -POSTHOOK: query: explain +POSTHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 depends on stages: Stage-1 @@ -257,15 +266,17 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@tjoin1 POSTHOOK: Input: default@tjoin2 #### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c2j2 0 10 15 NULL 1 20 25 NULL 2 NULL 50 NULL -PREHOOK: query: explain vectorization expression +PREHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization expression +POSTHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) POSTHOOK: type: QUERY +Explain PLAN VECTORIZATION: enabled: true enabledConditionsMet: [hive.vectorized.execution.enabled IS true] @@ -289,6 +300,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct] Select Operator expressions: rnum (type: int), c1 (type: int), c2 (type: int) outputColumnNames: _col0, _col1, _col2 @@ -346,6 +358,12 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: rnum:int, c1:int, c2:int + partitionColumnCount: 0 + scratchColumnTypeNames: [string] Map 2 Map Operator Tree: TableScan @@ -353,6 +371,7 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:char(2), 3:ROW__ID:struct] Select Operator expressions: c1 (type: int), c2 (type: char(2)) outputColumnNames: _col0, _col1 @@ -367,8 +386,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator + keyColumns: 1:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe 
for values IS true + valueColumns: 2:char(2) Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: char(2)) Execution mode: vectorized, llap @@ -382,6 +403,12 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [1, 2] + dataColumns: rnum:int, c1:int, c2:char(2) + partitionColumnCount: 0 + scratchColumnTypeNames: [] Stage: Stage-0 Fetch Operator @@ -399,15 +426,17 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@tjoin1 POSTHOOK: Input: default@tjoin2 #### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c2j2 0 10 15 NULL 1 20 25 NULL 2 NULL 50 NULL -PREHOOK: query: explain vectorization expression +PREHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization expression +POSTHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) POSTHOOK: type: QUERY +Explain PLAN VECTORIZATION: enabled: true enabledConditionsMet: [hive.vectorized.execution.enabled IS true] @@ -431,6 +460,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct] Select Operator expressions: rnum (type: int), c1 (type: int), c2 (type: int) outputColumnNames: _col0, _col1, _col2 @@ -488,6 +518,12 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: rnum:int, c1:int, c2:int + partitionColumnCount: 0 + scratchColumnTypeNames: [string] Map 2 Map Operator Tree: TableScan @@ -495,6 +531,7 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:char(2), 3:ROW__ID:struct] Select Operator expressions: c1 (type: int), c2 (type: char(2)) outputColumnNames: _col0, _col1 @@ -509,8 +546,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator + keyColumns: 1:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 2:char(2) Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: char(2)) Execution mode: vectorized, llap @@ -524,6 +563,12 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [1, 2] + dataColumns: rnum:int, c1:int, c2:char(2) + partitionColumnCount: 0 + scratchColumnTypeNames: [] Stage: Stage-0 Fetch Operator @@ -541,15 +586,17 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@tjoin1 POSTHOOK: Input: default@tjoin2 #### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c2j2 0 10 15 NULL 1 20 25 NULL 2 NULL 50 NULL -PREHOOK: query: explain vectorization expression +PREHOOK: query: explain vectorization 
detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization expression +POSTHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) POSTHOOK: type: QUERY +Explain PLAN VECTORIZATION: enabled: true enabledConditionsMet: [hive.vectorized.execution.enabled IS true] @@ -573,6 +620,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct] Select Operator expressions: rnum (type: int), c1 (type: int), c2 (type: int) outputColumnNames: _col0, _col1, _col2 @@ -591,9 +639,16 @@ STAGE PLANS: 0 _col1 (type: int) 1 _col0 (type: int) Map Join Vectorization: + bigTableFilterExpressions: FilterLongColGreaterLongScalar(col 2:int, val 15) + bigTableKeyColumns: 1:int + bigTableRetainColumnNums: [0, 1, 2] + bigTableValueColumns: 0:int, 1:int, 2:int className: VectorMapJoinOuterLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + projectedOutput: 0:int, 1:int, 2:int, 4:char(2) + smallTableValueMapping: 4:char(2) + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col2, _col4 input vertices: 1 Map 2 @@ -627,6 +682,12 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: rnum:int, c1:int, c2:int + partitionColumnCount: 0 + scratchColumnTypeNames: [string] Map 2 Map Operator Tree: TableScan @@ -634,6 +695,7 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:char(2), 3:ROW__ID:struct] Select Operator expressions: c1 (type: int), c2 (type: char(2)) outputColumnNames: _col0, _col1 @@ -648,8 +710,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator + keyColumns: 1:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 2:char(2) Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: char(2)) Execution mode: vectorized, llap @@ -663,6 +727,12 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [1, 2] + dataColumns: rnum:int, c1:int, c2:char(2) + partitionColumnCount: 0 + scratchColumnTypeNames: [] Stage: Stage-0 Fetch Operator @@ -680,15 +750,17 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@tjoin1 POSTHOOK: Input: default@tjoin2 #### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c2j2 0 10 15 NULL 1 20 25 NULL 2 NULL 50 NULL -PREHOOK: query: 
explain vectorization expression +PREHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization expression +POSTHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) POSTHOOK: type: QUERY +Explain PLAN VECTORIZATION: enabled: true enabledConditionsMet: [hive.vectorized.execution.enabled IS true] @@ -712,6 +784,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct] Select Operator expressions: rnum (type: int), c1 (type: int), c2 (type: int) outputColumnNames: _col0, _col1, _col2 @@ -730,9 +803,16 @@ STAGE PLANS: 0 _col1 (type: int) 1 _col0 (type: int) Map Join Vectorization: + bigTableFilterExpressions: FilterLongColGreaterLongScalar(col 2:int, val 15) + bigTableKeyColumns: 1:int + bigTableRetainColumnNums: [0, 1, 2] + bigTableValueColumns: 0:int, 1:int, 2:int className: VectorMapJoinOuterLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + projectedOutput: 0:int, 1:int, 2:int, 4:char(2) + smallTableValueMapping: 4:char(2) + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col2, _col4 input vertices: 1 Map 2 @@ -766,6 +846,12 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: rnum:int, c1:int, c2:int + partitionColumnCount: 0 + scratchColumnTypeNames: [string] Map 2 Map Operator Tree: TableScan @@ -773,6 +859,7 @@ STAGE PLANS: Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:char(2), 3:ROW__ID:struct] Select Operator expressions: c1 (type: int), c2 (type: char(2)) outputColumnNames: _col0, _col1 @@ -787,8 +874,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator + keyColumns: 1:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 2:char(2) Statistics: Num rows: 4 Data size: 360 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: char(2)) Execution mode: vectorized, llap @@ -802,6 +891,12 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [1, 2] + dataColumns: rnum:int, c1:int, c2:char(2) + partitionColumnCount: 0 + scratchColumnTypeNames: [] Stage: Stage-0 Fetch Operator @@ -819,6 +914,7 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@tjoin1 POSTHOOK: Input: default@tjoin2 #### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 
tjoin1.c2 c2j2 0 10 15 NULL 1 20 25 NULL 2 NULL 50 NULL diff --git ql/src/test/results/clientpositive/llap/vector_leftsemi_mapjoin.q.out ql/src/test/results/clientpositive/llap/vector_leftsemi_mapjoin.q.out index 9c51b32d45..960f5f5e10 100644 --- ql/src/test/results/clientpositive/llap/vector_leftsemi_mapjoin.q.out +++ ql/src/test/results/clientpositive/llap/vector_leftsemi_mapjoin.q.out @@ -128,18 +128,100 @@ POSTHOOK: query: select * from t4_n19 POSTHOOK: type: QUERY POSTHOOK: Input: default@t4_n19 #### A masked pattern was here #### -PREHOOK: query: explain vectorization only summary - +PREHOOK: query: explain vectorization expression select * from t1_n148 a left semi join t2_n87 b on a.key=b.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization only summary - +POSTHOOK: query: explain vectorization expression select * from t1_n148 a left semi join t2_n87 b on a.key=b.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: enabled: false enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Map 1 <- Map 3 (BROADCAST_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 key (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0, _col1 + input vertices: + 1 Map 3 + Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int), _col1 (type: string) + sort order: ++ + Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + Execution mode: llap + LLAP IO: all inputs + Map 3 + Map Operator Tree: + TableScan + alias: b + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Group By Operator + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Execution mode: llap + LLAP IO: all inputs + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + table: + input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + PREHOOK: query: select * from t1_n148 a left semi join t2_n87 b on a.key=b.key sort by a.key, a.value PREHOOK: type: QUERY PREHOOK: Input: default@t1_n148 @@ -156,16 +238,100 @@ POSTHOOK: Input: default@t2_n87 10 val_10 4 val_4 8 val_8 -PREHOOK: query: explain vectorization only summary +PREHOOK: query: explain vectorization expression select * from t2_n87 a left semi join t1_n148 b on b.key=a.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization only summary +POSTHOOK: query: explain vectorization expression select * from t2_n87 a left semi join t1_n148 b on b.key=a.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: enabled: false enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Map 1 <- Map 3 (BROADCAST_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 key (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0, _col1 + input vertices: + 1 Map 3 + Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int), _col1 (type: string) + sort order: ++ + Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + Execution mode: llap + LLAP IO: all inputs + Map 3 + Map Operator Tree: + TableScan + alias: b + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Group By Operator + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Execution mode: llap + LLAP IO: all inputs + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + PREHOOK: query: select * from t2_n87 a left semi join t1_n148 b on b.key=a.key sort by a.key, a.value PREHOOK: type: QUERY PREHOOK: Input: default@t1_n148 @@ -184,16 +350,100 @@ POSTHOOK: Input: default@t2_n87 10 val_5 4 val_2 8 val_4 -PREHOOK: query: explain vectorization only summary +PREHOOK: query: explain vectorization expression select * from t1_n148 a left semi join t4_n19 b on b.key=a.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization only summary +POSTHOOK: query: explain vectorization expression select * from t1_n148 a left semi join t4_n19 b on b.key=a.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: enabled: false enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Map 1 <- Map 3 (BROADCAST_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 key (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0, _col1 + input vertices: + 1 Map 3 + Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int), _col1 (type: string) + sort order: ++ + Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + Execution mode: llap + LLAP IO: all inputs + Map 3 + Map Operator Tree: + TableScan + alias: b + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Group By Operator + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Execution mode: llap + LLAP IO: all inputs + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + PREHOOK: query: select * from t1_n148 a left semi join t4_n19 b on b.key=a.key sort by a.key, a.value PREHOOK: type: QUERY PREHOOK: Input: default@t1_n148 @@ -204,18 +454,106 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n148 POSTHOOK: Input: default@t4_n19 #### A masked pattern was here #### -PREHOOK: query: explain vectorization only summary +PREHOOK: query: explain vectorization expression select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization only summary +POSTHOOK: query: explain vectorization expression select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: enabled: false enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] -PREHOOK: query: select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value -PREHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Map 1 <- Map 3 (BROADCAST_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 key (type: int) + 1 _col1 (type: int) + outputColumnNames: _col1 + input vertices: + 1 Map 3 + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col1 (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Execution mode: llap + LLAP IO: all inputs + Map 3 + Map Operator Tree: + TableScan + alias: b + filterExpr: (key < 15) (type: boolean) + Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (key < 15) (type: boolean) + Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int) + outputColumnNames: _col1 + Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE + Group By Operator + keys: _col1 (type: int), _col1 (type: int) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE + Execution mode: llap + LLAP IO: all inputs + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: string) + outputColumnNames: _col0 + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + File Output 
Operator + compressed: false + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value +PREHOOK: type: QUERY PREHOOK: Input: default@t1_n148 PREHOOK: Input: default@t3_n35 #### A masked pattern was here #### @@ -235,16 +573,100 @@ val_5 val_5 val_8 val_9 -PREHOOK: query: explain vectorization only summary +PREHOOK: query: explain vectorization expression select * from t1_n148 a left semi join t2_n87 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization only summary +POSTHOOK: query: explain vectorization expression select * from t1_n148 a left semi join t2_n87 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: enabled: false enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Map 1 <- Map 3 (BROADCAST_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 key (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0, _col1 + input vertices: + 1 Map 3 + Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int), _col1 (type: string) + sort order: ++ + Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + Execution mode: llap + LLAP IO: all inputs + Map 3 + Map Operator Tree: + TableScan + alias: b + filterExpr: ((value < 'val_10') and key is not null) (type: boolean) + Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((value < 'val_10') and key is not null) (type: boolean) + Statistics: Num rows: 3 Data size: 564 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: string) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 3 Data size: 564 Basic stats: COMPLETE Column stats: NONE + Group By Operator + keys: _col0 (type: int), _col1 (type: string) + mode: hash + outputColumnNames: _col0, _col1 + Statistics: Num rows: 3 Data size: 564 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 3 Data size: 564 Basic stats: COMPLETE Column stats: NONE + Execution mode: llap + LLAP IO: all inputs + Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) + 
outputColumnNames: _col0, _col1 + Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + PREHOOK: query: select * from t1_n148 a left semi join t2_n87 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value PREHOOK: type: QUERY PREHOOK: Input: default@t1_n148 @@ -258,224 +680,1433 @@ POSTHOOK: Input: default@t2_n87 0 val_0 0 val_0 0 val_0 -PREHOOK: query: explain vectorization only summary +PREHOOK: query: explain vectorization expression select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization only summary +POSTHOOK: query: explain vectorization expression select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: enabled: false enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] -PREHOOK: query: select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value -PREHOOK: type: QUERY -PREHOOK: Input: default@t1_n148 -PREHOOK: Input: default@t3_n35 -#### A masked pattern was here #### -POSTHOOK: query: select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value -POSTHOOK: type: QUERY -POSTHOOK: Input: default@t1_n148 -POSTHOOK: Input: default@t3_n35 -#### A masked pattern was here #### -val_10 -val_8 -val_9 -PREHOOK: query: explain vectorization only summary -select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value -PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization only summary -select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value -POSTHOOK: type: QUERY -PLAN VECTORIZATION: - enabled: false - enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] - -PREHOOK: query: select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value -PREHOOK: type: QUERY -PREHOOK: Input: default@t1_n148 -PREHOOK: Input: default@t2_n87 -#### A masked pattern was here #### -POSTHOOK: query: select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value -POSTHOOK: type: QUERY -POSTHOOK: Input: default@t1_n148 -POSTHOOK: Input: default@t2_n87 -#### A masked pattern was here #### -PREHOOK: query: explain vectorization only summary -select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value -PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization only summary -select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value -POSTHOOK: type: QUERY -PLAN VECTORIZATION: - enabled: false - 
enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] - -PREHOOK: query: select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value -PREHOOK: type: QUERY -PREHOOK: Input: default@t1_n148 -PREHOOK: Input: default@t2_n87 -#### A masked pattern was here #### -POSTHOOK: query: select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value -POSTHOOK: type: QUERY -POSTHOOK: Input: default@t1_n148 -POSTHOOK: Input: default@t2_n87 -#### A masked pattern was here #### -10 val_5 -10 val_5 -10 val_5 -4 val_2 -8 val_4 -PREHOOK: query: explain vectorization only summary -select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key -PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization only summary -select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key -POSTHOOK: type: QUERY -PLAN VECTORIZATION: - enabled: false - enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 -PREHOOK: query: select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key -PREHOOK: type: QUERY -PREHOOK: Input: default@t1_n148 -PREHOOK: Input: default@t3_n35 +STAGE PLANS: + Stage: Stage-1 + Tez #### A masked pattern was here #### -POSTHOOK: query: select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key -POSTHOOK: type: QUERY -POSTHOOK: Input: default@t1_n148 -POSTHOOK: Input: default@t3_n35 + Edges: + Map 2 <- Map 1 (BROADCAST_EDGE) + Reducer 3 <- Map 2 (SIMPLE_EDGE) #### A masked pattern was here #### -0 -0 -0 -0 -0 -0 -10 -10 -10 -10 -2 -4 -4 -5 -5 -5 -8 -8 -9 -PREHOOK: query: explain vectorization only summary -select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value -PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization only summary -select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value -POSTHOOK: type: QUERY -PLAN VECTORIZATION: - enabled: false - enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: t3_n35 + filterExpr: (key > 5) (type: boolean) + Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: (key > 5) (type: boolean) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Group By Operator + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Execution mode: llap + LLAP IO: all inputs + Map 2 + Map Operator Tree: + TableScan + alias: a + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE + Map 
Join Operator
+ condition map:
+ Left Semi Join 0 to 1
+ keys:
+ 0 key (type: int)
+ 1 _col0 (type: int)
+ outputColumnNames: _col1
+ input vertices:
+ 1 Map 1
+ Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col1 (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Reducer 3
+ Execution mode: llap
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-PREHOOK: query: select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value
PREHOOK: type: QUERY
PREHOOK: Input: default@t1_n148
-PREHOOK: Input: default@t2_n87
+PREHOOK: Input: default@t3_n35
#### A masked pattern was here ####
-POSTHOOK: query: select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value
+POSTHOOK: query: select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t1_n148
-POSTHOOK: Input: default@t2_n87
+POSTHOOK: Input: default@t3_n35
#### A masked pattern was here ####
-0 val_0
-0 val_0
-0 val_0
-8 val_8
+val_10
+val_8
+val_9
-PREHOOK: query: explain vectorization only summary
-select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value
+PREHOOK: query: explain vectorization expression
+select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value
PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization only summary
-select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value
+POSTHOOK: query: explain vectorization expression
+select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value
POSTHOOK: type: QUERY
PLAN VECTORIZATION:
enabled: false
enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
-PREHOOK: query: select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Map 2 <- Map 1 (BROADCAST_EDGE)
+ Reducer 3 <- Map 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: t2_n87
+ filterExpr: ((key > 5) and (value <= 'val_20')) (type: boolean)
+ Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: ((key > 5) and (value <= 'val_20')) (type: boolean)
+ Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: int), value (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: int), _col1 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 2
+ Map Operator Tree:
+ TableScan
+ alias: a
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Left Semi Join 0 to 1
+ keys:
+ 0 key (type: int)
+ 1 _col0 (type: int)
+ outputColumnNames: _col1
+ input vertices:
+ 1 Map 1
+ Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col1 (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Reducer 3
+ Execution mode: llap
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value
PREHOOK: type: QUERY
PREHOOK: Input: default@t1_n148
PREHOOK: Input: default@t2_n87
-PREHOOK: Input: default@t3_n35
#### A masked pattern was here ####
-POSTHOOK: query: select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value
+POSTHOOK: query: select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t1_n148
POSTHOOK: Input: default@t2_n87
-POSTHOOK: Input: default@t3_n35
#### A masked pattern was here ####
-0 val_0 0 val_0
-0 val_0 0 val_0
-0 val_0 0 val_0
-0 val_0 0 val_0
-0 val_0 0 val_0
-0 val_0 0 val_0
-0 val_0 0 val_0
-0 val_0 0 val_0
-0 val_0 0 val_0
-10 val_10 10 val_5
-10 val_10 10 val_5
-10 val_10 10 val_5
-4 val_4 4 val_2
-8 val_8 8 val_4
-PREHOOK: query: explain vectorization only summary
-select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value
+PREHOOK: query: explain vectorization expression
+select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value
PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization only summary
-select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value
+POSTHOOK: query: explain vectorization expression
+select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value
POSTHOOK: type: QUERY
PLAN VECTORIZATION:
enabled: false
enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
-PREHOOK: query: select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Map 2 <- Map 1 (BROADCAST_EDGE)
+ Reducer 3 <- Map 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: t1_n148
+ filterExpr: (key > 2) (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: (key > 2) (type: boolean)
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: int)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 2
+ Map Operator Tree:
+ TableScan
+ alias: a
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Left Semi Join 0 to 1
+ keys:
+ 0 key (type: int)
+ 1 _col0 (type: int)
+ outputColumnNames: _col0, _col1
+ input vertices:
+ 1 Map 1
+ Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int), _col1 (type: string)
+ sort order: ++
+ Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Reducer 3
+ Execution mode: llap
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1_n148
+PREHOOK: Input: default@t2_n87
+#### A masked pattern was here ####
+POSTHOOK: query: select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1_n148
+POSTHOOK: Input: default@t2_n87
+#### A masked pattern was here ####
+10 val_5
+10 val_5
+10 val_5
+4 val_2
+8 val_4
+PREHOOK: query: explain vectorization expression
+select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization expression
+select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+ enabled: false
+ enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Map 1 <- Map 3 (BROADCAST_EDGE)
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: a
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Left Semi Join 0 to 1
+ keys:
+ 0 key (type: int)
+ 1 _col0 (type: int)
+ outputColumnNames: _col0
+ input vertices:
+ 1 Map 3
+ Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 3
+ Map Operator Tree:
+ TableScan
+ alias: b
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: int)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Reducer 2
+ Execution mode: llap
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1_n148
+PREHOOK: Input: default@t3_n35
+#### A masked pattern was here ####
+POSTHOOK: query: select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1_n148
+POSTHOOK: Input: default@t3_n35
+#### A masked pattern was here ####
+0
+0
+0
+0
+0
+0
+10
+10
+10
+10
+2
+4
+4
+5
+5
+5
+8
+8
+9
+PREHOOK: query: explain vectorization expression
+select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization expression
+select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+ enabled: false
+ enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Map 1 <- Map 3 (BROADCAST_EDGE)
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: a
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Left Semi Join 0 to 1
+ keys:
+ 0 key (type: int)
+ 1 (2 * _col0) (type: int)
+ outputColumnNames: _col0, _col1
+ input vertices:
+ 1 Map 3
+ Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int), _col1 (type: string)
+ sort order: ++
+ Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 3
+ Map Operator Tree:
+ TableScan
+ alias: b
+ filterExpr: (2 * key) is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: (2 * key) is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: int)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: (2 * _col0) (type: int)
+ sort order: +
+ Map-reduce partition columns: (2 * _col0) (type: int)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Reducer 2
+ Execution mode: llap
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1_n148
+PREHOOK: Input: default@t2_n87
+#### A masked pattern was here ####
+POSTHOOK: query: select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1_n148
+POSTHOOK: Input: default@t2_n87
+#### A masked pattern was here ####
+0 val_0
+0 val_0
+0 val_0
+8 val_8
+PREHOOK: query: explain vectorization expression
+select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization expression
+select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+ enabled: false
+ enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE)
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: a
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Inner Join 0 to 1
+ keys:
+ 0 key (type: int)
+ 1 key (type: int)
+ outputColumnNames: _col0, _col1, _col5, _col6
+ input vertices:
+ 1 Map 3
+ Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Left Semi Join 0 to 1
+ keys:
+ 0 _col5 (type: int)
+ 1 _col0 (type: int)
+ outputColumnNames: _col0, _col1, _col5, _col6
+ input vertices:
+ 1 Map 4
+ Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col0 (type: int), _col1 (type: string), _col5 (type: int), _col6 (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int), _col1 (type: string)
+ sort order: ++
+ Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col2 (type: int), _col3 (type: string)
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 3
+ Map Operator Tree:
+ TableScan
+ alias: b
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: key (type: int)
+ sort order: +
+ Map-reduce partition columns: key (type: int)
+ Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE
+ value expressions: value (type: string)
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 4
+ Map Operator Tree:
+ TableScan
+ alias: c
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: int)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Reducer 2
+ Execution mode: llap
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), VALUE._col0 (type: int), VALUE._col1 (type: string)
+ outputColumnNames: _col0, _col1, _col2, _col3
+ Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1_n148
+PREHOOK: Input: default@t2_n87
+PREHOOK: Input: default@t3_n35
+#### A masked pattern was here ####
+POSTHOOK: query: select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1_n148
+POSTHOOK: Input: default@t2_n87
+POSTHOOK: Input: default@t3_n35
+#### A masked pattern was here ####
+0 val_0 0 val_0
+0 val_0 0 val_0
+0 val_0 0 val_0
+0 val_0 0 val_0
+0 val_0 0 val_0
+0 val_0 0 val_0
+0 val_0 0 val_0
+0 val_0 0 val_0
+0 val_0 0 val_0
+10 val_10 10 val_5
+10 val_10 10 val_5
+10 val_10 10 val_5
+4 val_4 4 val_2
+8 val_8 8 val_4
+PREHOOK: query: explain vectorization expression
+select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization expression
+select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+ enabled: false
+ enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Map 1 <- Map 3 (BROADCAST_EDGE)
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: a
+ filterExpr: (key is not null and value is not null) (type: boolean)
+ Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: (key is not null and value is not null) (type: boolean)
+ Statistics: Num rows: 20 Data size: 3760 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Left Semi Join 0 to 1
+ keys:
+ 0 key (type: int), value (type: string)
+ 1 _col0 (type: int), _col1 (type: string)
+ outputColumnNames: _col0, _col1
+ input vertices:
+ 1 Map 3
+ Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int), _col1 (type: string)
+ sort order: ++
+ Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 3
+ Map Operator Tree:
+ TableScan
+ alias: b
+ filterExpr: (key is not null and value is not null) (type: boolean)
+ Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: (key is not null and value is not null) (type: boolean)
+ Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: int), value (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: int), _col1 (type: string)
+ mode: hash
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int), _col1 (type: string)
+ sort order: ++
+ Map-reduce partition columns: _col0 (type: int), _col1 (type: string)
+ Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Reducer 2
+ Execution mode: llap
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1_n148
+PREHOOK: Input: default@t3_n35
+#### A masked pattern was here ####
+POSTHOOK: query: select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1_n148
+POSTHOOK: Input: default@t3_n35
+#### A masked pattern was here ####
+0 val_0
+0 val_0
+0 val_0
+0 val_0
+0 val_0
+0 val_0
+10 val_10
+2 val_2
+4 val_4
+5 val_5
+5 val_5
+5 val_5
+8 val_8
+9 val_9
+PREHOOK: query: explain vectorization expression
+select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization expression
+select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+ enabled: false
+ enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE)
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: a
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Left Semi Join 0 to 1
+ keys:
+ 0 key (type: int)
+ 1 _col0 (type: int)
+ outputColumnNames: _col0
+ input vertices:
+ 1 Map 3
+ Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Left Semi Join 0 to 1
+ keys:
+ 0 _col0 (type: int)
+ 1 _col0 (type: int)
+ outputColumnNames: _col0
+ input vertices:
+ 1 Map 4
+ Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 3
+ Map Operator Tree:
+ TableScan
+ alias: b
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: int)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 4
+ Map Operator Tree:
+ TableScan
+ alias: c
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: int)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Reducer 2
+ Execution mode: llap
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1_n148
+PREHOOK: Input: default@t2_n87
+PREHOOK: Input: default@t3_n35
+#### A masked pattern was here ####
+POSTHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1_n148
+POSTHOOK: Input: default@t2_n87
+POSTHOOK: Input: default@t3_n35
+#### A masked pattern was here ####
+0
+0
+0
+0
+0
+0
+10
+10
+10
+10
+4
+4
+8
+8
+PREHOOK: query: explain vectorization expression
+select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization expression
+select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+ enabled: false
+ enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE)
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: a
+ Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Left Outer Join 0 to 1
+ keys:
+ 0 key (type: int)
+ 1 key (type: int)
+ outputColumnNames: _col0, _col5
+ input vertices:
+ 1 Map 3
+ Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: _col5 is not null (type: boolean)
+ Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Left Semi Join 0 to 1
+ keys:
+ 0 _col5 (type: int)
+ 1 _col0 (type: int)
+ outputColumnNames: _col0
+ input vertices:
+ 1 Map 4
+ Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 3
+ Map Operator Tree:
+ TableScan
+ alias: b
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: key (type: int)
+ sort order: +
+ Map-reduce partition columns: key (type: int)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 4
+ Map Operator Tree:
+ TableScan
+ alias: c
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: int)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Reducer 2
+ Execution mode: llap
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1_n148
+PREHOOK: Input: default@t2_n87
+PREHOOK: Input: default@t3_n35
+#### A masked pattern was here ####
+POSTHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1_n148
+POSTHOOK: Input: default@t2_n87
+POSTHOOK: Input: default@t3_n35
+#### A masked pattern was here ####
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+10
+10
+10
+10
+4
+4
+8
+8
+PREHOOK: query: explain vectorization expression
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization expression
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+ enabled: false
+ enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (BROADCAST_EDGE)
+ Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: a
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: key (type: int)
+ sort order: +
+ Map-reduce partition columns: key (type: int)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 4
+ Map Operator Tree:
+ TableScan
+ alias: b
+ Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: key (type: int)
+ sort order: +
+ Map-reduce partition columns: key (type: int)
+ Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 5
+ Map Operator Tree:
+ TableScan
+ alias: c
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: int)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Reducer 2
+ Execution mode: llap
+ Reduce Operator Tree:
+ Merge Join Operator
+ condition map:
+ Full Outer Join 0 to 1
+ keys:
+ 0 key (type: int)
+ 1 key (type: int)
+ outputColumnNames: _col0, _col5
+ Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: _col5 is not null (type: boolean)
+ Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Left Semi Join 0 to 1
+ keys:
+ 0 _col5 (type: int)
+ 1 _col0 (type: int)
+ outputColumnNames: _col0
+ input vertices:
+ 1 Map 5
+ Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+ Reducer 3
+ Execution mode: llap
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
PREHOOK: type: QUERY
PREHOOK: Input: default@t1_n148
+PREHOOK: Input: default@t2_n87
PREHOOK: Input: default@t3_n35
#### A masked pattern was here ####
-POSTHOOK: query: select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value
+POSTHOOK: query: select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t1_n148
+POSTHOOK: Input: default@t2_n87
POSTHOOK: Input: default@t3_n35
#### A masked pattern was here ####
-0 val_0
-0 val_0
-0 val_0
-0 val_0
-0 val_0
-0 val_0
-10 val_10
-2 val_2
-4 val_4
-5 val_5
-5 val_5
-5 val_5
-8 val_8
-9 val_9
-PREHOOK: query: explain vectorization only summary
-select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+10
+10
+10
+10
+4
+4
+8
+8
+NULL
+NULL
+NULL
+PREHOOK: query: explain vectorization expression
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization only summary
-select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key
+POSTHOOK: query: explain vectorization expression
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
POSTHOOK: type: QUERY
PLAN VECTORIZATION:
enabled: false
enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
-PREHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (BROADCAST_EDGE)
+ Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: a
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: key (type: int)
+ sort order: +
+ Map-reduce partition columns: key (type: int)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 4
+ Map Operator Tree:
+ TableScan
+ alias: b
+ Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: key (type: int)
+ sort order: +
+ Map-reduce partition columns: key (type: int)
+ Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 5
+ Map Operator Tree:
+ TableScan
+ alias: c
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: int)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Reducer 2
+ Execution mode: llap
+ Reduce Operator Tree:
+ Merge Join Operator
+ condition map:
+ Full Outer Join 0 to 1
+ keys:
+ 0 key (type: int)
+ 1 key (type: int)
+ outputColumnNames: _col0, _col5
+ Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: _col5 is not null (type: boolean)
+ Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Left Semi Join 0 to 1
+ keys:
+ 0 _col5 (type: int)
+ 1 _col0 (type: int)
+ outputColumnNames: _col0
+ input vertices:
+ 1 Map 5
+ Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+ Reducer 3
+ Execution mode: llap
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
PREHOOK: type: QUERY
PREHOOK: Input: default@t1_n148
PREHOOK: Input: default@t2_n87
PREHOOK: Input: default@t3_n35
#### A masked pattern was here ####
-POSTHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key
+POSTHOOK: query: select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t1_n148
POSTHOOK: Input: default@t2_n87
@@ -487,6 +2118,18 @@ POSTHOOK: Input: default@t3_n35
0
0
0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
10
10
10
@@ -495,63 +2138,114 @@ POSTHOOK: Input: default@t3_n35
4
8
8
-PREHOOK: query: explain vectorization only summary
-select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
+NULL
+NULL
+NULL
+PREHOOK: query: explain vectorization expression
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization only summary
-select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
+POSTHOOK: query: explain vectorization expression
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
POSTHOOK: type: QUERY
PLAN VECTORIZATION:
enabled: false
enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
-PREHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t1_n148
-PREHOOK: Input: default@t2_n87
-PREHOOK: Input: default@t3_n35
-#### A masked pattern was here ####
-POSTHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@t1_n148
-POSTHOOK: Input: default@t2_n87
-POSTHOOK: Input: default@t3_n35
-#### A masked pattern was here ####
-0
-0
-0
-0
-0
-0
-0
-0
-0
-0
-0
-0
-0
-0
-0
-0
-0
-0
-10
-10
-10
-10
-4
-4
-8
-8
-PREHOOK: query: explain vectorization only summary
-select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
-POSTHOOK: type: QUERY
-PLAN VECTORIZATION:
- enabled: false
- enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
+ Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: a
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: key (type: int)
+ sort order: +
+ Map-reduce partition columns: key (type: int)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 4
+ Map Operator Tree:
+ TableScan
+ alias: b
+ Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: key (type: int)
+ sort order: +
+ Map-reduce partition columns: key (type: int)
+ Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 5
+ Map Operator Tree:
+ TableScan
+ alias: c
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: int)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Reducer 2
+ Execution mode: llap
+ Reduce Operator Tree:
+ Merge Join Operator
+ condition map:
+ Full Outer Join 0 to 1
+ Left Semi Join 1 to 2
+ keys:
+ 0 key (type: int)
+ 1 key (type: int)
+ 2 _col0 (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
+ Reducer 3
+ Execution mode: llap
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
PREHOOK: query: select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
PREHOOK: type: QUERY
@@ -594,16 +2288,122 @@ POSTHOOK: Input: default@t3_n35
NULL
NULL
NULL
-PREHOOK: query: explain vectorization only summary
+PREHOOK: query: explain vectorization expression
select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key
PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization only summary
+POSTHOOK: query: explain vectorization expression
select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key
POSTHOOK: type: QUERY
PLAN VECTORIZATION:
enabled: false
enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE)
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: a
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Left Semi Join 0 to 1
+ keys:
+ 0 key (type: int)
+ 1 _col0 (type: int)
+ outputColumnNames: _col0
+ input vertices:
+ 1 Map 3
+ Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Left Outer Join 0 to 1
+ keys:
+ 0 _col0 (type: int)
+ 1 key (type: int)
+ outputColumnNames: _col0
+ input vertices:
+ 1 Map 4
+ Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 3
+ Map Operator Tree:
+ TableScan
+ alias: b
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: int)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 4
+ Map Operator Tree:
+ TableScan
+ alias: c
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: key (type: int)
+ sort order: +
+ Map-reduce partition columns: key (type: int)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Reducer 2
+ Execution mode: llap
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
PREHOOK: query: select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key
PREHOOK: type: QUERY
PREHOOK: Input: default@t1_n148
@@ -645,23 +2445,136 @@ POSTHOOK: Input: default@t3_n35
4
8
8
-PREHOOK: query: explain vectorization only summary
-select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key
+PREHOOK: query: explain vectorization expression
+select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key
PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization only summary
-select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key
+POSTHOOK: query: explain vectorization expression
+select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key
POSTHOOK: type: QUERY
PLAN VECTORIZATION:
enabled: false
enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
-PREHOOK: query: select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Map 1 <- Map 4 (BROADCAST_EDGE)
+ Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
+ Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: a
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Left Semi Join 0 to 1
+ keys:
+ 0 key (type: int)
+ 1 _col0 (type: int)
+ outputColumnNames: _col0
+ input vertices:
+ 1 Map 4
+ Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 4
+ Map Operator Tree:
+ TableScan
+ alias: b
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: int)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 5
+ Map Operator Tree:
+ TableScan
+ alias: c
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: key (type: int)
+ sort order: +
+ Map-reduce partition columns: key (type: int)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Reducer 2
+ Execution mode: llap
+ Reduce Operator Tree:
+ Merge Join Operator
+ condition map:
+ Full Outer Join 0 to 1
+ keys:
+ 0 _col0 (type: int)
+ 1 key (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
+ Reducer 3
+ Execution mode: llap
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key
PREHOOK: type: QUERY
PREHOOK: Input: default@t1_n148
PREHOOK: Input: default@t2_n87
PREHOOK: Input: default@t3_n35
#### A masked pattern was here ####
-POSTHOOK: query: select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key
+POSTHOOK: query: select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t1_n148
POSTHOOK: Input: default@t2_n87
@@ -689,25 +2602,149 @@ POSTHOOK: Input: default@t3_n35
10
10
10
+10
+10
+10
+10
+10
+10
+10
+10
+2
4
4
+5
+5
+5
8
8
+9
NULL
NULL
NULL
-NULL
-NULL
-PREHOOK: query: explain vectorization only summary
+PREHOOK: query: explain vectorization expression
select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key
PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization only summary
+POSTHOOK: query: explain vectorization expression
select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key
POSTHOOK: type: QUERY
PLAN VECTORIZATION:
enabled: false
enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Map 1 <- Map 4 (BROADCAST_EDGE)
+ Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
+ Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: a
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Left Semi Join 0 to 1
+ keys:
+ 0 key (type: int)
+ 1 _col0 (type: int)
+ outputColumnNames: _col0
+ input vertices:
+ 1 Map 4
+ Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 4
+ Map Operator Tree:
+ TableScan
+ alias: b
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: int)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 5
+ Map Operator Tree:
+ TableScan
+ alias: c
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: key (type: int)
+ sort order: +
+ Map-reduce partition columns: key (type: int)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Reducer 2
+ Execution mode: llap
+ Reduce Operator Tree:
+ Merge Join Operator
+ condition map:
+ Full Outer Join 0 to 1
+ keys:
+ 0 _col0 (type: int)
+ 1 key (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
+ Reducer 3
+ Execution mode: llap
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
PREHOOK: query: select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key
PREHOOK: type: QUERY
PREHOOK: Input: default@t1_n148
@@ -762,16 +2799,122 @@ POSTHOOK: Input: default@t3_n35
NULL
NULL
NULL
-PREHOOK: query: explain vectorization only summary
+PREHOOK: query: explain vectorization expression
select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key
PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization only summary
+POSTHOOK: query: explain vectorization expression
select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key
POSTHOOK: type: QUERY
PLAN VECTORIZATION:
enabled: false
enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE)
+ Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: a
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 21 Data size: 3948 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Left Semi Join 0 to 1
+ keys:
+ 0 key (type: int)
+ 1 _col0 (type: int)
+ outputColumnNames: _col0, _col1
+ input vertices:
+ 1 Map 3
+ Statistics: Num rows: 23 Data size: 4342 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Left Outer Join 0 to 1
+ keys:
+ 0 _col1 (type: string)
+ 1 value (type: string)
+ outputColumnNames: _col0
+ input vertices:
+ 1 Map 4
+ Statistics: Num rows: 25 Data size: 4776 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Statistics: Num rows: 25 Data size: 4776 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 3
+ Map Operator Tree:
+ TableScan
+ alias: b
+ filterExpr: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: int)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: int)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: int)
+ Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 4
+ Map Operator Tree:
+ TableScan
+ alias: c
+ Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: value (type: string)
+ sort order: +
+ Map-reduce partition columns: value (type: string)
+ Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+ Reducer 2
+ Execution mode: llap
+ Reduce Operator Tree:
+ Select Operator
+ expressions: KEY.reducesinkkey0 (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 25 Data size: 4776 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 25 Data size: 4776 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
PREHOOK: query: select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key
PREHOOK: type: QUERY
PREHOOK: Input: default@t1_n148
@@ -819,16 +2962,92 @@ POSTHOOK: Input: default@t3_n35
4
8
8
-PREHOOK: query: explain vectorization only summary
+PREHOOK: query: explain vectorization expression
select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100
PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization only summary
+POSTHOOK: query: explain vectorization expression
select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100
POSTHOOK: type: QUERY
PLAN VECTORIZATION:
enabled: false
enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Tez
+#### A masked pattern was here ####
+ Edges:
+ Map 1 <- Map 2 (BROADCAST_EDGE)
+#### A masked pattern was here ####
+ Vertices:
+ Map 1
+ Map Operator Tree:
+ TableScan
+ alias: a
+ filterExpr: ((key > 100) and value is not null) (type: boolean)
+ Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: ((key > 100) and value is not null) (type: boolean)
+ Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: int), value (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Left Semi Join 0 to 1
+ keys:
+ 0 _col1 (type: string)
+ 1 _col0 (type: string)
+ outputColumnNames: _col0
+ input vertices:
+ 1 Map 2
+ Statistics: Num rows: 12 Data size: 2226 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 12 Data size: 2226 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Execution mode: llap
+ LLAP IO: all inputs
+ Map 2
+ Map Operator Tree:
+ TableScan
+ alias: b
+ filterExpr: value is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: value is not null (type: boolean)
+ Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: value (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: string)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE
+ Execution mode: llap
+ LLAP IO: all inputs
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
PREHOOK: query: select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100
PREHOOK: type: QUERY
PREHOOK: Input: default@t2_n87
@@ -839,10 +3058,10 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@t2_n87
POSTHOOK: Input: default@t3_n35
#### A masked pattern was here ####
-PREHOOK: query: explain
vectorization summary +PREHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t2_n87 b on a.key=b.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization summary +POSTHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t2_n87 b on a.key=b.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -881,6 +3100,7 @@ STAGE PLANS: input vertices: 1 Map 3 Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ @@ -949,10 +3169,10 @@ POSTHOOK: Input: default@t2_n87 10 val_10 4 val_4 8 val_8 -PREHOOK: query: explain vectorization summary +PREHOOK: query: explain vectorization operator select * from t2_n87 a left semi join t1_n148 b on b.key=a.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization summary +POSTHOOK: query: explain vectorization operator select * from t2_n87 a left semi join t1_n148 b on b.key=a.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -991,6 +3211,7 @@ STAGE PLANS: input vertices: 1 Map 3 Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ @@ -1061,10 +3282,10 @@ POSTHOOK: Input: default@t2_n87 10 val_5 4 val_2 8 val_4 -PREHOOK: query: explain vectorization summary +PREHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t4_n19 b on b.key=a.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization summary +POSTHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t4_n19 b on b.key=a.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -1103,6 +3324,7 @@ STAGE PLANS: input vertices: 1 Map 3 Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ @@ -1165,10 +3387,10 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n148 POSTHOOK: Input: default@t4_n19 #### A masked pattern was here #### -PREHOOK: query: explain vectorization summary +PREHOOK: query: explain vectorization operator select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization summary +POSTHOOK: query: explain vectorization operator select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -1207,6 +3429,7 @@ STAGE PLANS: input vertices: 1 Map 3 Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 @@ -1284,10 +3507,10 @@ val_5 val_5 val_8 val_9 -PREHOOK: query: explain vectorization summary +PREHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t2_n87 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization summary +POSTHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t2_n87 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value 
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
@@ -1326,6 +3549,7 @@ STAGE PLANS:
                       input vertices:
                         1 Map 3
                       Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+                      HybridGraceHashJoin: true
                       Reduce Output Operator
                         key expressions: _col0 (type: int), _col1 (type: string)
                         sort order: ++
@@ -1391,10 +3615,10 @@ POSTHOOK: Input: default@t2_n87
 0 val_0
 0 val_0
 0 val_0
-PREHOOK: query: explain vectorization summary
+PREHOOK: query: explain vectorization operator
 select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization summary
+POSTHOOK: query: explain vectorization operator
 select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
@@ -1458,6 +3682,7 @@ STAGE PLANS:
                       input vertices:
                         1 Map 1
                       Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+                      HybridGraceHashJoin: true
                       Select Operator
                         expressions: _col1 (type: string)
                         outputColumnNames: _col0
@@ -1502,10 +3727,10 @@ POSTHOOK: Input: default@t3_n35
 val_10
 val_8
 val_9
-PREHOOK: query: explain vectorization summary
+PREHOOK: query: explain vectorization operator
 select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization summary
+POSTHOOK: query: explain vectorization operator
 select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
@@ -1569,6 +3794,7 @@ STAGE PLANS:
                       input vertices:
                         1 Map 1
                       Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+                      HybridGraceHashJoin: true
                       Select Operator
                         expressions: _col1 (type: string)
                         outputColumnNames: _col0
@@ -1610,10 +3836,10 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t1_n148
 POSTHOOK: Input: default@t2_n87
 #### A masked pattern was here ####
-PREHOOK: query: explain vectorization summary
+PREHOOK: query: explain vectorization operator
 select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization summary
+POSTHOOK: query: explain vectorization operator
 select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
@@ -1677,6 +3903,7 @@ STAGE PLANS:
                       input vertices:
                         1 Map 1
                       Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+                      HybridGraceHashJoin: true
                       Reduce Output Operator
                         key expressions: _col0 (type: int), _col1 (type: string)
                         sort order: ++
@@ -1719,10 +3946,10 @@ POSTHOOK: Input: default@t2_n87
 10 val_5
 4 val_2
 8 val_4
-PREHOOK: query: explain vectorization summary
+PREHOOK: query: explain vectorization operator
 select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization summary
+POSTHOOK: query: explain vectorization operator
 select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
@@ -1761,6 +3988,7 @@ STAGE PLANS:
                       input vertices:
                         1 Map 3
                       Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+                      HybridGraceHashJoin: true
                       Reduce Output Operator
                         key expressions: _col0 (type: int)
                         sort order: +
@@ -1842,10 +4070,10 @@ POSTHOOK: Input: default@t3_n35
 8
 8
 9
-PREHOOK: query: explain vectorization summary
+PREHOOK: query: explain vectorization operator
 select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization summary
+POSTHOOK: query: explain vectorization operator
 select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
@@ -1884,6 +4112,7 @@ STAGE PLANS:
                       input vertices:
                         1 Map 3
                       Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+                      HybridGraceHashJoin: true
                       Reduce Output Operator
                         key expressions: _col0 (type: int), _col1 (type: string)
                         sort order: ++
@@ -1950,10 +4179,10 @@ POSTHOOK: Input: default@t2_n87
 0 val_0
 0 val_0
 8 val_8
-PREHOOK: query: explain vectorization summary
+PREHOOK: query: explain vectorization operator
 select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization summary
+POSTHOOK: query: explain vectorization operator
 select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
@@ -1985,25 +4214,34 @@ STAGE PLANS:
                     Map Join Operator
                       condition map:
                            Inner Join 0 to 1
-                           Left Semi Join 1 to 2
                       keys:
                         0 key (type: int)
                         1 key (type: int)
-                        2 _col0 (type: int)
                       outputColumnNames: _col0, _col1, _col5, _col6
                       input vertices:
                         1 Map 3
-                        2 Map 4
-                      Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE
-                      Select Operator
-                        expressions: _col0 (type: int), _col1 (type: string), _col5 (type: int), _col6 (type: string)
-                        outputColumnNames: _col0, _col1, _col2, _col3
-                        Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE
-                        Reduce Output Operator
-                          key expressions: _col0 (type: int), _col1 (type: string)
-                          sort order: ++
-                          Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE
-                          value expressions: _col2 (type: int), _col3 (type: string)
+                      Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE
+                      HybridGraceHashJoin: true
+                      Map Join Operator
+                        condition map:
+                             Left Semi Join 0 to 1
+                        keys:
+                          0 _col5 (type: int)
+                          1 _col0 (type: int)
+                        outputColumnNames: _col0, _col1, _col5, _col6
+                        input vertices:
+                          1 Map 4
+                        Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+                        HybridGraceHashJoin: true
+                        Select Operator
+                          expressions: _col0 (type: int), _col1 (type: string), _col5 (type: int), _col6 (type: string)
+                          outputColumnNames: _col0, _col1, _col2, _col3
+                          Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+                          Reduce Output Operator
+                            key expressions: _col0 (type: int), _col1 (type: string)
+                            sort order: ++
+                            Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+                            value expressions: _col2 (type: int), _col3 (type: string)
 Execution mode: llap
 LLAP IO: all inputs
 Map 3
@@ -2054,10 +4292,10 @@ STAGE PLANS:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), VALUE._col0 (type: int), VALUE._col1 (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2095,10 +4333,10 @@ POSTHOOK: Input: default@t3_n35
 10 val_10 10 val_5
 4 val_4 4 val_2
 8 val_8 8 val_4
-PREHOOK: query: explain vectorization summary
+PREHOOK: query: explain vectorization operator
 select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization summary
+POSTHOOK: query: explain vectorization operator
 select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
@@ -2137,6 +4375,7 @@ STAGE PLANS:
                       input vertices:
                         1 Map 3
                       Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE
+                      HybridGraceHashJoin: true
                       Reduce Output Operator
                         key expressions: _col0 (type: int), _col1 (type: string)
                         sort order: ++
@@ -2213,10 +4452,10 @@ POSTHOOK: Input: default@t3_n35
 5 val_5
 8 val_8
 9 val_9
-PREHOOK: query: explain vectorization summary
+PREHOOK: query: explain vectorization operator
 select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization summary
+POSTHOOK: query: explain vectorization operator
 select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
@@ -2248,20 +4487,29 @@ STAGE PLANS:
                     Map Join Operator
                       condition map:
                            Left Semi Join 0 to 1
-                           Left Semi Join 0 to 2
                       keys:
                         0 key (type: int)
                         1 _col0 (type: int)
-                        2 _col0 (type: int)
                       outputColumnNames: _col0
                       input vertices:
                         1 Map 3
-                        2 Map 4
-                      Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: int)
-                        sort order: +
-                        Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+                      HybridGraceHashJoin: true
+                      Map Join Operator
+                        condition map:
+                             Left Semi Join 0 to 1
+                        keys:
+                          0 _col0 (type: int)
+                          1 _col0 (type: int)
+                        outputColumnNames: _col0
+                        input vertices:
+                          1 Map 4
+                        Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
+                        HybridGraceHashJoin: true
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int)
+                          sort order: +
+                          Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
 Execution mode: llap
 LLAP IO: all inputs
 Map 3
@@ -2320,10 +4568,10 @@ STAGE PLANS:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: int)
                 outputColumnNames: _col0
-                Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2361,10 +4609,10 @@ POSTHOOK: Input: default@t3_n35
 4
 8
 8
-PREHOOK: query: explain vectorization summary
+PREHOOK: query: explain vectorization operator
 select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization summary
+POSTHOOK: query: explain vectorization operator
 select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
@@ -2392,20 +4640,32 @@ STAGE PLANS:
                     Map Join Operator
                       condition map:
                            Left Outer Join 0 to 1
-                           Left Semi Join 1 to 2
                       keys:
                         0 key (type: int)
                         1 key (type: int)
-                        2 _col0 (type: int)
-                      outputColumnNames: _col0
+                      outputColumnNames: _col0, _col5
                       input vertices:
                         1 Map 3
-                        2 Map 4
-                      Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: int)
-                        sort order: +
-                        Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+                      HybridGraceHashJoin: true
+                      Filter Operator
+                        predicate: _col5 is not null (type: boolean)
+                        Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+                        Map Join Operator
+                          condition map:
+                               Left Semi Join 0 to 1
+                          keys:
+                            0 _col5 (type: int)
+                            1 _col0 (type: int)
+                          outputColumnNames: _col0
+                          input vertices:
+                            1 Map 4
+                          Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+                          HybridGraceHashJoin: true
+                          Reduce Output Operator
+                            key expressions: _col0 (type: int)
+                            sort order: +
+                            Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
 Execution mode: llap
 LLAP IO: all inputs
 Map 3
@@ -2424,33 +4684,197 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: c
+                  filterExpr: key is not null (type: boolean)
                   Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: key (type: int)
-                    outputColumnNames: _col0
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
                     Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
-                    Group By Operator
-                      keys: _col0 (type: int)
-                      mode: hash
+                    Select Operator
+                      expressions: key (type: int)
                       outputColumnNames: _col0
                       Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: int)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: int)
+                      Group By Operator
+                        keys: _col0 (type: int)
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: int)
+                          Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
                         Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
 Execution mode: llap
 LLAP IO: all inputs
 Reducer 2
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: int)
+                outputColumnNames: _col0
+                Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1_n148
+PREHOOK: Input: default@t2_n87
+PREHOOK: Input: default@t3_n35
+#### A masked pattern was here ####
+POSTHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1_n148
+POSTHOOK: Input: default@t2_n87
+POSTHOOK: Input: default@t3_n35
+#### A masked pattern was here ####
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+10
+10
+10
+10
+4
+4
+8
+8
+PREHOOK: query: explain vectorization operator
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization operator
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: false
+  enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (BROADCAST_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan
+                  alias: a
+                  Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: key (type: int)
+                    sort order: +
+                    Map-reduce partition columns: key (type: int)
+                    Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+            Execution mode: llap
+            LLAP IO: all inputs
+        Map 4
+            Map Operator Tree:
+                TableScan
+                  alias: b
+                  Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: key (type: int)
+                    sort order: +
+                    Map-reduce partition columns: key (type: int)
+                    Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+            Execution mode: llap
+            LLAP IO: all inputs
+        Map 5
+            Map Operator Tree:
+                TableScan
+                  alias: c
+                  filterExpr: key is not null (type: boolean)
+                  Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: int)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+                      Group By Operator
+                        keys: _col0 (type: int)
+                        mode: hash
+                        outputColumnNames: _col0
                         Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: int)
+                          Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
 Execution mode: llap
 LLAP IO: all inputs
 Reducer 2
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Full Outer Join 0 to 1
+                keys:
+                  0 key (type: int)
+                  1 key (type: int)
+                outputColumnNames: _col0, _col5
+                Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+                Filter Operator
+                  predicate: _col5 is not null (type: boolean)
+                  Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+                  Map Join Operator
+                    condition map:
+                         Left Semi Join 0 to 1
+                    keys:
+                      0 _col5 (type: int)
+                      1 _col0 (type: int)
+                    outputColumnNames: _col0
+                    input vertices:
+                      1 Map 5
+                    Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+                    HybridGraceHashJoin: true
+                    Reduce Output Operator
+                      key expressions: _col0 (type: int)
+                      sort order: +
+                      Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+              Reducer 3
 Execution mode: llap
 Reduce Operator Tree:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: int)
 outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
-                  Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2462,13 +4886,13 @@ STAGE PLANS:
 Processor Tree:
 ListSink
 
-PREHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
+PREHOOK: query: select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t1_n148
 PREHOOK: Input: default@t2_n87
 PREHOOK: Input: default@t3_n35
 #### A masked pattern was here ####
-POSTHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
+POSTHOOK: query: select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t1_n148
 POSTHOOK: Input: default@t2_n87
@@ -2500,10 +4924,13 @@ POSTHOOK: Input: default@t3_n35
 4
 8
 8
-PREHOOK: query: explain vectorization summary
+NULL
+NULL
+NULL
+PREHOOK: query: explain vectorization operator
 select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization summary
+POSTHOOK: query: explain vectorization operator
 select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
@@ -2519,7 +4946,7 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (BROADCAST_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
@@ -2551,21 +4978,25 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: c
+                  filterExpr: key is not null (type: boolean)
                   Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: key (type: int)
-                    outputColumnNames: _col0
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
                     Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
-                    Group By Operator
-                      keys: _col0 (type: int)
-                      mode: hash
+                    Select Operator
+                      expressions: key (type: int)
                       outputColumnNames: _col0
                       Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: int)
-                      sort order: +
-                      Map-reduce partition columns: _col0 (type: int)
+                      Group By Operator
+                        keys: _col0 (type: int)
+                        mode: hash
+                        outputColumnNames: _col0
                         Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: int)
+                          Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
 Execution mode: llap
 LLAP IO: all inputs
 Reducer 2
@@ -2573,28 +5004,40 @@ STAGE PLANS:
             Reduce Operator Tree:
               Merge Join Operator
                 condition map:
-                     Outer Join 0 to 1
-                     Left Semi Join 1 to 2
+                     Full Outer Join 0 to 1
                 keys:
                   0 key (type: int)
                   1 key (type: int)
-                  2 _col0 (type: int)
-                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: int)
-                  sort order: +
-                  Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
+                outputColumnNames: _col0, _col5
+                Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+                Filter Operator
+                  predicate: _col5 is not null (type: boolean)
+                  Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+                  Map Join Operator
+                    condition map:
+                         Left Semi Join 0 to 1
+                    keys:
+                      0 _col5 (type: int)
+                      1 _col0 (type: int)
+                    outputColumnNames: _col0
+                    input vertices:
+                      1 Map 5
+                    Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+                    HybridGraceHashJoin: true
+                    Reduce Output Operator
+                      key expressions: _col0 (type: int)
+                      sort order: +
+                      Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
 Reducer 3
 Execution mode: llap
 Reduce Operator Tree:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: int)
 outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
-                  Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2647,10 +5090,10 @@ POSTHOOK: Input: default@t3_n35
 NULL
 NULL
 NULL
-PREHOOK: query: explain vectorization summary
+PREHOOK: query: explain vectorization operator
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization summary
+POSTHOOK: query: explain vectorization operator
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
@@ -2666,44 +5109,73 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+        Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
        Map 1
            Map Operator Tree:
                TableScan
                  alias: a
+                  filterExpr: key is not null (type: boolean)
                  Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: key (type: int)
-                    sort order: +
-                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Left Semi Join 0 to 1
+                      keys:
+                        0 key (type: int)
+                        1 _col0 (type: int)
+                      outputColumnNames: _col0
+                      input vertices:
+                        1 Map 3
+                      Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+                      HybridGraceHashJoin: true
+                      Map Join Operator
+                        condition map:
+                             Left Outer Join 0 to 1
+                        keys:
+                          0 _col0 (type: int)
+                          1 key (type: int)
+                        outputColumnNames: _col0
+                        input vertices:
+                          1 Map 4
+                        Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
+                        HybridGraceHashJoin: true
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int)
+                          sort order: +
+                          Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
 Execution mode: llap
 LLAP IO: all inputs
-        Map 4
+        Map 3
            Map Operator Tree:
                TableScan
                  alias: b
+                  filterExpr: key is not null (type: boolean)
                  Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: key (type: int)
-                    outputColumnNames: _col0
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
                    Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
-                    Group By Operator
-                      keys: _col0 (type: int)
-                      mode: hash
+                    Select Operator
+                      expressions: key (type: int)
                      outputColumnNames: _col0
                      Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: int)
-                      sort order: +
-                      Map-reduce partition columns: _col0 (type: int)
+                      Group By Operator
+                        keys: _col0 (type: int)
+                        mode: hash
+                        outputColumnNames: _col0
                        Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: int)
+                          Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
 Execution mode: llap
 LLAP IO: all inputs
-        Map 5
+        Map 4
            Map Operator Tree:
                TableScan
                  alias: c
@@ -2716,32 +5188,15 @@ STAGE PLANS:
 Execution mode: llap
 LLAP IO: all inputs
 Reducer 2
-            Execution mode: llap
-            Reduce Operator Tree:
-              Merge Join Operator
-                condition map:
-                     Left Semi Join 0 to 1
-                     Left Outer Join 0 to 2
-                keys:
-                  0 key (type: int)
-                  1 _col0 (type: int)
-                  2 key (type: int)
-                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: int)
-                  sort order: +
-                  Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
-            Reducer 3
 Execution mode: llap
 Reduce Operator Tree:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: int)
 outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
 File Output Operator
-                  Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2794,11 +5249,11 @@ POSTHOOK: Input: default@t3_n35
 4
 8
 8
-PREHOOK: query: explain vectorization summary
-select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key
+PREHOOK: query: explain vectorization operator
+select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization summary
-select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key
+POSTHOOK: query: explain vectorization operator
+select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
   enabled: false
@@ -2813,7 +5268,8 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
+        Map 1 <- Map 4 (BROADCAST_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
@@ -2821,33 +5277,52 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: a
+                  filterExpr: key is not null (type: boolean)
                  Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: key (type: int)
-                    sort order: +
-                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Left Semi Join 0 to 1
+                      keys:
+                        0 key (type: int)
+                        1 _col0 (type: int)
+                      outputColumnNames: _col0
+                      input vertices:
+                        1 Map 4
+                      Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+                      HybridGraceHashJoin: true
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: int)
+                        Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
 Execution mode: llap
 LLAP IO: all inputs
 Map 4
            Map Operator Tree:
                TableScan
                  alias: b
+                  filterExpr: key is not null (type: boolean)
                  Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: key (type: int)
-                    outputColumnNames: _col0
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
                    Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
-                    Group By Operator
-                      keys: _col0 (type: int)
-                      mode: hash
+                    Select Operator
+                      expressions: key (type: int)
                      outputColumnNames: _col0
                      Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: int)
-                      sort order: +
-                      Map-reduce partition columns: _col0 (type: int)
+                      Group By Operator
+                        keys: _col0 (type: int)
+                        mode: hash
+                        outputColumnNames: _col0
                        Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: int)
+                          Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
 Execution mode: llap
 LLAP IO: all inputs
 Map 5
@@ -2867,28 +5342,26 @@ STAGE PLANS:
             Reduce Operator Tree:
               Merge Join Operator
                 condition map:
-                     Left Semi Join 0 to 1
-                     Right Outer Join 0 to 2
+                     Full Outer Join 0 to 1
                 keys:
-                  0 key (type: int)
-                  1 _col0 (type: int)
-                  2 key (type: int)
+                  0 _col0 (type: int)
+                  1 key (type: int)
                 outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: int)
                   sort order: +
-                  Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
 Reducer 3
 Execution mode: llap
 Reduce Operator Tree:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: int)
 outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
-                  Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2900,13 +5373,13 @@ STAGE PLANS:
 Processor Tree:
 ListSink
 
-PREHOOK: query: select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key
+PREHOOK: query: select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t1_n148
 PREHOOK: Input: default@t2_n87
 PREHOOK: Input: default@t3_n35
 #### A masked pattern was here ####
-POSTHOOK: query: select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key
+POSTHOOK: query: select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t1_n148
 POSTHOOK: Input: default@t2_n87
@@ -2934,19 +5407,30 @@ POSTHOOK: Input: default@t3_n35
 10
 10
 10
+10
+10
+10
+10
+10
+10
+10
+10
+2
 4
 4
+5
+5
+5
 8
 8
+9
 NULL
 NULL
 NULL
-NULL
-NULL
-PREHOOK: query: explain vectorization summary
+PREHOOK: query: explain vectorization operator
 select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization summary
+POSTHOOK: query: explain vectorization operator
 select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
@@ -2962,7 +5446,8 @@ STAGE PLANS:
     Tez
 #### A masked pattern was here ####
       Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
+        Map 1 <- Map 4 (BROADCAST_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
         Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
 #### A masked pattern was here ####
       Vertices:
@@ -2970,33 +5455,52 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: a
+                  filterExpr: key is not null (type: boolean)
                  Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: key (type: int)
-                    sort order: +
-                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Left Semi Join 0 to 1
+                      keys:
+                        0 key (type: int)
+                        1 _col0 (type: int)
+                      outputColumnNames: _col0
+                      input vertices:
+                        1 Map 4
+                      Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+                      HybridGraceHashJoin: true
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: int)
+                        Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE
 Execution mode: llap
 LLAP IO: all inputs
 Map 4
            Map Operator Tree:
                TableScan
                  alias: b
+                  filterExpr: key is not null (type: boolean)
                  Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: key (type: int)
-                    outputColumnNames: _col0
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
                    Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
-                    Group By Operator
-                      keys: _col0 (type: int)
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: int)
-                      sort order: +
-                      Map-reduce partition columns: _col0 (type: int)
+                    Select Operator
+                      expressions: key (type: int)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+                      Group By Operator
+                        keys: _col0 (type: int)
+                        mode: hash
+                        outputColumnNames: _col0
                        Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: int)
+                          Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
 Execution mode: llap
 LLAP IO: all inputs
 Map 5
@@ -3016,28 +5520,26 @@ STAGE PLANS:
             Reduce Operator Tree:
               Merge Join Operator
                 condition map:
-                     Left Semi Join 0 to 1
-                     Outer Join 0 to 2
+                     Full Outer Join 0 to 1
                 keys:
-                  0 key (type: int)
-                  1 _col0 (type: int)
-                  2 key (type: int)
+                  0 _col0 (type: int)
+                  1 key (type: int)
                 outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: int)
                   sort order: +
-                  Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
 Reducer 3
 Execution mode: llap
 Reduce Operator Tree:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: int)
 outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
-                  Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3049,64 +5551,10 @@ STAGE PLANS:
 Processor Tree:
 ListSink
 
-PREHOOK: query: select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key
-PREHOOK: type: QUERY
-PREHOOK: Input: default@t1_n148
-PREHOOK: Input: default@t2_n87
-PREHOOK: Input: default@t3_n35
-#### A masked pattern was here ####
-POSTHOOK: query: select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@t1_n148
-POSTHOOK: Input: default@t2_n87
-POSTHOOK: Input: default@t3_n35
-#### A masked pattern was here ####
-0
-0
-0
-0
-0
-0
-0
-0
-0
-0
-0
-0
-0
-0
-0
-0
-0
-0
-10
-10
-10
-10
-10
-10
-10
-10
-10
-10
-10
-10
-2
-4
-4
-5
-5
-5
-8
-8
-9
-NULL
-NULL
-NULL
-PREHOOK: query: explain vectorization summary
+PREHOOK: query: explain vectorization operator
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization summary
+POSTHOOK: query: explain vectorization operator
 select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
@@ -3145,6 +5593,7 @@ STAGE PLANS:
                       input vertices:
                         1 Map 3
                       Statistics: Num rows: 23 Data size: 4342 Basic stats: COMPLETE Column stats: NONE
+                      HybridGraceHashJoin: true
                       Map Join Operator
                         condition map:
                              Left Outer Join 0 to 1
@@ -3155,6 +5604,7 @@ STAGE PLANS:
                         input vertices:
                           1 Map 4
                         Statistics: Num rows: 25 Data size: 4776 Basic stats: COMPLETE Column stats: NONE
+                        HybridGraceHashJoin: true
                         Reduce Output Operator
                           key expressions: _col0 (type: int)
                           sort order: +
@@ -3266,10 +5716,10 @@ POSTHOOK: Input: default@t3_n35
 4
 8
 8
-PREHOOK: query: explain vectorization summary
+PREHOOK: query: explain vectorization operator
 select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100
 PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization summary
+POSTHOOK: query: explain vectorization operator
 select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100
 POSTHOOK: type: QUERY
 PLAN VECTORIZATION:
@@ -3311,6 +5761,7 @@ STAGE PLANS:
                       input vertices:
                         1 Map 2
                       Statistics: Num rows: 12 Data size: 2226 Basic stats: COMPLETE Column stats: NONE
+                      HybridGraceHashJoin: true
                       File Output Operator
                         compressed: false
                         Statistics: Num rows: 12 Data size: 2226 Basic stats: COMPLETE Column stats: NONE
@@ -3391,6 +5842,7 @@ STAGE PLANS:
                       className: VectorFilterOperator
                       native: true
                   Map Join Vectorization:
+                      bigTableKeyExpressions: col 0:int
                       className: VectorMapJoinOperator
                       native: false
                       nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
@@ -3504,6 +5956,7 @@ STAGE PLANS:
                       className: VectorFilterOperator
                       native: true
                   Map Join Vectorization:
+                      bigTableKeyExpressions: col 0:int
                       className: VectorMapJoinOperator
                       native: false
                       nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
@@ -3619,6 +6072,7 @@ STAGE PLANS:
                       className: VectorFilterOperator
                       native: true
                   Map Join Vectorization:
+                      bigTableKeyExpressions: col 0:int
                       className: VectorMapJoinOperator
                       native: false
MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -3726,6 +6180,7 @@ STAGE PLANS: className: VectorFilterOperator native: true Map Join Vectorization: + bigTableKeyExpressions: col 0:int className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -3847,6 +6302,7 @@ STAGE PLANS: className: VectorFilterOperator native: true Map Join Vectorization: + bigTableKeyExpressions: col 0:int className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -3987,6 +6443,7 @@ STAGE PLANS: className: VectorFilterOperator native: true Map Join Vectorization: + bigTableKeyExpressions: col 0:int className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -4100,6 +6557,7 @@ STAGE PLANS: className: VectorFilterOperator native: true Map Join Vectorization: + bigTableKeyExpressions: col 0:int className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -4210,6 +6668,7 @@ STAGE PLANS: className: VectorFilterOperator native: true Map Join Vectorization: + bigTableKeyExpressions: col 0:int className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -4292,6 +6751,7 @@ STAGE PLANS: className: VectorFilterOperator native: true Map Join Vectorization: + bigTableKeyExpressions: col 0:int className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -4418,6 +6878,7 @@ STAGE PLANS: className: VectorFilterOperator native: true Map Join Vectorization: + bigTableKeyExpressions: col 0:int className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -4529,17 +6990,24 @@ STAGE PLANS: className: VectorFilterOperator native: true Map Join Vectorization: + bigTableKeyExpressions: col 0:int className: VectorMapJoinOperator native: false - nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS 
true
-                    nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false, One MapJoin Condition IS false
-                Select Vectorization:
-                    className: VectorSelectOperator
-                    native: true
-                Reduce Sink Vectorization:
-                    className: VectorReduceSinkObjectHashOperator
+                    nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
+                    nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false
+                Map Join Vectorization:
+                    bigTableKeyExpressions: col 2:int
+                    className: VectorMapJoinOperator
+                    native: false
+                    nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
+                    nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false
+                Select Vectorization:
+                    className: VectorSelectOperator
                    native: true
-                    nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                Reduce Sink Vectorization:
+                    className: VectorReduceSinkObjectHashOperator
+                    native: true
+                    nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
            Execution mode: vectorized, llap
            LLAP IO: all inputs
            Map Vectorization:
@@ -4677,6 +7145,7 @@ STAGE PLANS:
                    className: VectorFilterOperator
                    native: true
                Map Join Vectorization:
+                    bigTableKeyExpressions: col 0:int, col 1:string
                    className: VectorMapJoinOperator
                    native: false
                    nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
@@ -4773,7 +7242,167 @@ PREHOOK: query: explain vectorization only operator
select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key
PREHOOK: type: QUERY
POSTHOOK: query: explain vectorization only operator
-select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key
+select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key
+POSTHOOK: type: QUERY
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+      Vertices:
+        Map 1
+            Map Operator Tree:
+                TableScan Vectorization:
+                    native: true
+                Filter Vectorization:
+                    className: VectorFilterOperator
+                    native: true
+                Map Join Vectorization:
+                    bigTableKeyExpressions: col 0:int
+                    className: VectorMapJoinOperator
+                    native: false
+                    nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
+                    nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false
+                Map Join Vectorization:
+                    bigTableKeyExpressions: col 0:int
+                    className: VectorMapJoinOperator
+                    native: false
+                    nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
+                    nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false
+                Reduce Sink Vectorization:
+                    className: VectorReduceSinkObjectHashOperator
+                    native: true
+                    nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+        Map 3
+            Map Operator Tree:
+                TableScan Vectorization:
+                    native: true
+                Filter Vectorization:
+                    className: VectorFilterOperator
+                    native: true
+                Select Vectorization:
+                    className: VectorSelectOperator
+                    native: true
+                Group By Vectorization:
+                    className: VectorGroupByOperator
+                    groupByMode: HASH
+                    native: false
+                    vectorProcessingMode: HASH
+                Reduce Sink Vectorization:
+                    className: VectorReduceSinkLongOperator
+                    native: true
+                    nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+        Map 4
+            Map Operator Tree:
+                TableScan Vectorization:
+                    native: true
+                Filter Vectorization:
+                    className: VectorFilterOperator
+                    native: true
+                Select Vectorization:
+                    className: VectorSelectOperator
+                    native: true
+                Group By Vectorization:
+                    className: VectorGroupByOperator
+                    groupByMode: HASH
+                    native: false
+                    vectorProcessingMode: HASH
+                Reduce Sink Vectorization:
+                    className: VectorReduceSinkLongOperator
+                    native: true
+                    nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+        Reducer 2
+            Execution mode: vectorized, llap
+            Reduce Vectorization:
+                enabled: true
+                enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+            Reduce Operator Tree:
+                Select Vectorization:
+                    className: VectorSelectOperator
+                    native: true
+                File Sink Vectorization:
+                    className: VectorFileSinkOperator
+                    native: false
+
+  Stage: Stage-0
+    Fetch Operator
+
+PREHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1_n148
+PREHOOK: Input: default@t2_n87
+PREHOOK: Input: default@t3_n35
+#### A masked pattern was here ####
+POSTHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1_n148
+POSTHOOK: Input: default@t2_n87
+POSTHOOK: Input: default@t3_n35
+#### A masked pattern was here ####
+0
+0
+0
+0
+0
+0
+10
+10
+10
+10
+4
+4
+8
+8
+PREHOOK: query: explain vectorization only operator
+select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain vectorization only operator
+select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
POSTHOOK: type: QUERY
PLAN VECTORIZATION:
  enabled: true
@@ -4794,18 +7423,25 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan Vectorization:
                    native: true
-                Filter Vectorization:
-                    className: VectorFilterOperator
-                    native: true
-                Map Join Vectorization:
-                    className: VectorMapJoinOperator
-                    native: false
-                    nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
-                    nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false, One MapJoin Condition IS false
-                Reduce Sink Vectorization:
-                    className: VectorReduceSinkObjectHashOperator
-                    native: true
-                    nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                Map Join Vectorization:
+                    bigTableKeyExpressions: col 0:int
+                    className: VectorMapJoinOperator
+                    native: false
+                    nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true
+                    nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false
+                Filter Vectorization:
+                    className: VectorFilterOperator
+                    native: true
+                Map Join Vectorization:
+                    bigTableKeyExpressions: col 1:int
+                    className: VectorMapJoinOperator
+                    native: false
+                    nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
+                    nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false
+                Reduce Sink Vectorization:
+                    className: VectorReduceSinkObjectHashOperator
+                    native: true
+                    nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
            Execution mode: vectorized, llap
            LLAP IO: all inputs
            Map Vectorization:
@@ -4821,21 +7457,10 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan Vectorization:
                    native: true
-                Filter Vectorization:
-                    className: VectorFilterOperator
+                Reduce Sink Vectorization:
+                    className: VectorReduceSinkLongOperator
                    native: true
-                Select Vectorization:
-                    className: VectorSelectOperator
-                    native: true
-                Group By Vectorization:
-                    className: VectorGroupByOperator
-                    groupByMode: HASH
-                    native: false
-                    vectorProcessingMode: HASH
-                Reduce Sink Vectorization:
-                    className: VectorReduceSinkLongOperator
-                    native: true
-                    nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                    nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
            Execution mode: vectorized, llap
            LLAP IO: all inputs
            Map Vectorization:
@@ -4844,7 +7469,7 @@ STAGE PLANS:
                inputFormatFeatureSupport: [DECIMAL_64]
                featureSupportInUse: [DECIMAL_64]
                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                allNative: false
+                allNative: true
                usesVectorUDFAdaptor: false
                vectorized: true
        Map 4
@@ -4896,13 +7521,13 @@ STAGE PLANS:
  Stage: Stage-0
    Fetch Operator
-PREHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key
+PREHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
PREHOOK: type: QUERY
PREHOOK: Input: default@t1_n148
PREHOOK: Input: default@t2_n87
PREHOOK: Input: default@t3_n35
#### A masked pattern was here ####
-POSTHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key
+POSTHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t1_n148
POSTHOOK: Input: default@t2_n87
@@ -4914,6 +7539,18 @@ POSTHOOK: Input: default@t3_n35
0
0
0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
+0
10
10
10
@@ -4922,11 +7559,11 @@ POSTHOOK: Input: default@t3_n35
4
4
8
8
-PREHOOK: query: explain vectorization only operator
-select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
+PREHOOK: query: explain vectorization operator
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization only operator
-select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key
+POSTHOOK: query: explain vectorization operator
+select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
POSTHOOK: type: QUERY
PLAN VECTORIZATION:
  enabled: true
@@ -4939,23 +7576,28 @@ STAGE DEPENDENCIES:
STAGE PLANS:
  Stage: Stage-1
    Tez
+#### A masked pattern was here ####
      Edges:
-        Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE)
-        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (BROADCAST_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
      Vertices:
        Map 1
            Map Operator Tree:
+                TableScan
+                  alias: a
+                  Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
                TableScan Vectorization:
                    native: true
-                Map Join Vectorization:
-                    className: VectorMapJoinOperator
-                    native: false
-                    nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true
-                    nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false, One MapJoin Condition IS false
-                Reduce Sink Vectorization:
-                    className: VectorReduceSinkObjectHashOperator
-                    native: true
-                    nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                Reduce Output Operator
+                  key expressions: key (type: int)
+                  sort order: +
+                  Map-reduce partition columns: key (type: int)
+                  Reduce Sink Vectorization:
+                      className: VectorReduceSinkLongOperator
+                      native: true
+                      nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                  Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized, llap
            LLAP IO: all inputs
            Map Vectorization:
@@ -4964,17 +7606,25 @@ STAGE PLANS:
                inputFormatFeatureSupport: [DECIMAL_64]
                featureSupportInUse: [DECIMAL_64]
                inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                allNative: false
+                allNative: true
                usesVectorUDFAdaptor: false
                vectorized: true
-        Map 3
+        Map 4
            Map Operator Tree:
+                TableScan
+                  alias: b
+                  Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
                TableScan Vectorization:
                    native: true
+                Reduce Output Operator
+                  key expressions: key (type: int)
+                  sort order: +
+                  Map-reduce partition columns: key (type: int)
                  Reduce Sink Vectorization:
                      className: VectorReduceSinkLongOperator
                      native: true
                      nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                  Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized, llap
            LLAP IO: all inputs
            Map Vectorization:
@@ -4986,22 +7636,46 @@ STAGE PLANS:
                allNative: true
                usesVectorUDFAdaptor: false
                vectorized: true
-        Map 4
+        Map 5
            Map Operator Tree:
+                TableScan
+                  alias: c
+                  filterExpr: key is not null (type: boolean)
+                  Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
                TableScan Vectorization:
                    native: true
-                Select Vectorization:
-                    className: VectorSelectOperator
+                Filter Operator
+                  Filter Vectorization:
+                      className: VectorFilterOperator
                      native: true
-                Group By Vectorization:
-                    className: VectorGroupByOperator
-                    groupByMode: HASH
-                    native: false
-                    vectorProcessingMode: HASH
-                Reduce Sink Vectorization:
-                    className: VectorReduceSinkLongOperator
-                    native: true
-                    nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                  predicate: key is not null (type: boolean)
+                  Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: int)
+                    outputColumnNames: _col0
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
+                    Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      Group By Vectorization:
+                          className: VectorGroupByOperator
+                          groupByMode: HASH
+                          native: false
+                          vectorProcessingMode: HASH
+                      keys: _col0 (type: int)
+                      mode: hash
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: int)
+                        Reduce Sink Vectorization:
+                            className: VectorReduceSinkLongOperator
+                            native: true
+                            nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                        Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized, llap
            LLAP IO: all inputs
            Map Vectorization:
@@ -5014,6 +7688,37 @@ STAGE PLANS:
                usesVectorUDFAdaptor: false
                vectorized: true
        Reducer 2
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Full Outer Join 0 to 1
+                keys:
+                  0 key (type: int)
+                  1 key (type: int)
+                outputColumnNames: _col0, _col5
+                Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+                Filter Operator
+                  predicate: _col5 is not null (type: boolean)
+                  Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE
+                  Map Join Operator
+                    condition map:
+                         Left Semi Join 0 to 1
+                    keys:
+                      0 _col5 (type: int)
+                      1 _col0 (type: int)
+                    outputColumnNames: _col0
+                    input vertices:
+                      1 Map 5
+                    Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: int)
+                      sort order: +
+                      Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+            MergeJoin Vectorization:
+                enabled: false
+                enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
+        Reducer 3
            Execution mode: vectorized, llap
            Reduce Vectorization:
                enabled: true
@@ -5022,23 +7727,37 @@ STAGE PLANS:
                usesVectorUDFAdaptor: false
                vectorized: true
            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: int)
+                outputColumnNames: _col0
                Select Vectorization:
                    className: VectorSelectOperator
                    native: true
+                Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
                  File Sink Vectorization:
                      className: VectorFileSinkOperator
                      native: false
+                  Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
  Stage: Stage-0
    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
-PREHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
+PREHOOK: query: select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
PREHOOK: type: QUERY
PREHOOK: Input: default@t1_n148
PREHOOK: Input: default@t2_n87
PREHOOK: Input: default@t3_n35
#### A masked pattern was here ####
-POSTHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
+POSTHOOK: query: select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
POSTHOOK: type: QUERY
POSTHOOK: Input: default@t1_n148
POSTHOOK: Input: default@t2_n87
@@ -5070,10 +7789,13 @@ POSTHOOK: Input: default@t3_n35
4
8
8
-PREHOOK: query: explain vectorization only operator
+NULL
+NULL
+NULL
+PREHOOK: query: explain vectorization operator
select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
PREHOOK: type: QUERY
-POSTHOOK: query: explain vectorization only operator
+POSTHOOK: query: explain vectorization operator
select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key
POSTHOOK: type: QUERY
PLAN VECTORIZATION:
@@ -5087,18 +7809,28 @@ STAGE PLANS:
  Stage: Stage-1
    Tez
+#### A masked pattern was here ####
      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (BROADCAST_EDGE)
        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
      Vertices:
        Map 1
            Map Operator Tree:
+                TableScan
+                  alias: a
+                  Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
                TableScan Vectorization:
                    native: true
+                Reduce Output Operator
+                  key expressions: key (type: int)
+                  sort order: +
+                  Map-reduce partition columns: key (type: int)
                  Reduce Sink Vectorization:
                      className: VectorReduceSinkLongOperator
                      native: true
                      nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                  Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized, llap
            LLAP IO: all inputs
            Map Vectorization:
@@ -5112,12 +7844,20 @@ STAGE PLANS:
                vectorized: true
        Map 4
            Map Operator Tree:
+                TableScan
+                  alias: b
+                  Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
                TableScan Vectorization:
                    native: true
+                Reduce Output Operator
+                  key expressions: key (type: int)
+                  sort order: +
+                  Map-reduce partition columns: key (type: int)
                  Reduce Sink Vectorization:
                      className: VectorReduceSinkLongOperator
                      native: true
                      nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                  Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized, llap
            LLAP IO: all inputs
            Map Vectorization:
@@ -5131,20 +7871,44 @@ STAGE PLANS:
                vectorized: true
Map 5 Map Operator Tree: + TableScan + alias: c + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - Select Vectorization: - className: VectorSelectOperator + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - Group By Vectorization: - className: VectorGroupByOperator - groupByMode: HASH - native: false - vectorProcessingMode: HASH - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + predicate: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Group By Operator + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -5157,6 +7921,36 @@ STAGE PLANS: usesVectorUDFAdaptor: false vectorized: true Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 key (type: int) + 1 key (type: int) + outputColumnNames: _col0, _col5 + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: _col5 is not null (type: boolean) + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 _col5 (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0 + input vertices: + 1 Map 5 + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: @@ -5166,57 +7960,30 @@ STAGE PLANS: usesVectorUDFAdaptor: false vectorized: true Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int) + outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + 
File Output Operator + compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator + limit: -1 + Processor Tree: + ListSink -PREHOOK: query: select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key -PREHOOK: type: QUERY -PREHOOK: Input: default@t1_n148 -PREHOOK: Input: default@t2_n87 -PREHOOK: Input: default@t3_n35 -#### A masked pattern was here #### -POSTHOOK: query: select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key -POSTHOOK: type: QUERY -POSTHOOK: Input: default@t1_n148 -POSTHOOK: Input: default@t2_n87 -POSTHOOK: Input: default@t3_n35 -#### A masked pattern was here #### -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -10 -10 -10 -10 -4 -4 -8 -8 -NULL -NULL -NULL PREHOOK: query: explain vectorization only operator select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key PREHOOK: type: QUERY @@ -5235,17 +8002,32 @@ STAGE PLANS: Stage: Stage-1 Tez Edges: - Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) - Reducer 3 <- Reducer 2 (SIMPLE_EDGE) + Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE) Vertices: Map 1 Map Operator Tree: TableScan Vectorization: native: true - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator + Filter Vectorization: + className: VectorFilterOperator native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Map Join Vectorization: + bigTableKeyExpressions: col 0:int + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false + Map Join Vectorization: + bigTableKeyExpressions: col 0:int + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -5254,25 +8036,28 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: 
[DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: true + allNative: false usesVectorUDFAdaptor: false vectorized: true - Map 4 + Map 3 Map Operator Tree: TableScan Vectorization: native: true - Select Vectorization: - className: VectorSelectOperator + Filter Vectorization: + className: VectorFilterOperator native: true - Group By Vectorization: - className: VectorGroupByOperator - groupByMode: HASH - native: false - vectorProcessingMode: HASH - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Select Vectorization: + className: VectorSelectOperator + native: true + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -5284,7 +8069,7 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - Map 5 + Map 4 Map Operator Tree: TableScan Vectorization: native: true @@ -5304,7 +8089,6 @@ STAGE PLANS: usesVectorUDFAdaptor: false vectorized: true Reducer 2 - Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true @@ -5365,10 +8149,10 @@ POSTHOOK: Input: default@t3_n35 8 8 PREHOOK: query: explain vectorization only operator -select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key +select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key PREHOOK: type: QUERY POSTHOOK: query: explain vectorization only operator -select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key +select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: enabled: true @@ -5382,17 +8166,27 @@ STAGE PLANS: Stage: Stage-1 Tez Edges: - Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) + Map 1 <- Map 4 (BROADCAST_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) Reducer 3 <- Reducer 2 (SIMPLE_EDGE) Vertices: Map 1 Map Operator Tree: TableScan Vectorization: native: true - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator + Filter Vectorization: + className: VectorFilterOperator native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Map Join Vectorization: + bigTableKeyExpressions: col 0:int + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small 
table vectorizes IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -5401,25 +8195,28 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: true + allNative: false usesVectorUDFAdaptor: false vectorized: true Map 4 Map Operator Tree: TableScan Vectorization: native: true - Select Vectorization: - className: VectorSelectOperator + Filter Vectorization: + className: VectorFilterOperator native: true - Group By Vectorization: - className: VectorGroupByOperator - groupByMode: HASH - native: false - vectorProcessingMode: HASH - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Select Vectorization: + className: VectorSelectOperator + native: true + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -5451,6 +8248,9 @@ STAGE PLANS: usesVectorUDFAdaptor: false vectorized: true Reducer 2 + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: @@ -5470,13 +8270,13 @@ STAGE PLANS: Stage: Stage-0 Fetch Operator -PREHOOK: query: select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key +PREHOOK: query: select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key PREHOOK: type: QUERY PREHOOK: Input: default@t1_n148 PREHOOK: Input: default@t2_n87 PREHOOK: Input: default@t3_n35 #### A masked pattern was here #### -POSTHOOK: query: select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key +POSTHOOK: query: select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n148 POSTHOOK: Input: default@t2_n87 @@ -5504,12 +8304,23 @@ POSTHOOK: Input: default@t3_n35 10 10 10 +10 +10 +10 +10 +10 +10 +10 +10 +2 4 4 +5 +5 +5 8 8 -NULL -NULL +9 NULL NULL NULL @@ -5531,17 +8342,27 @@ STAGE PLANS: Stage: Stage-1 Tez Edges: - Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) + Map 1 <- Map 4 
(BROADCAST_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) Reducer 3 <- Reducer 2 (SIMPLE_EDGE) Vertices: Map 1 Map Operator Tree: TableScan Vectorization: native: true - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator + Filter Vectorization: + className: VectorFilterOperator native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Map Join Vectorization: + bigTableKeyExpressions: col 0:int + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -5550,25 +8371,28 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: true + allNative: false usesVectorUDFAdaptor: false vectorized: true Map 4 Map Operator Tree: TableScan Vectorization: native: true - Select Vectorization: - className: VectorSelectOperator + Filter Vectorization: + className: VectorFilterOperator native: true - Group By Vectorization: - className: VectorGroupByOperator - groupByMode: HASH - native: false - vectorProcessingMode: HASH - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Select Vectorization: + className: VectorSelectOperator + native: true + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -5600,6 +8424,9 @@ STAGE PLANS: usesVectorUDFAdaptor: false vectorized: true Reducer 2 + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: @@ -5619,60 +8446,6 @@ STAGE PLANS: Stage: Stage-0 Fetch Operator -PREHOOK: query: select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key -PREHOOK: type: QUERY -PREHOOK: Input: default@t1_n148 -PREHOOK: Input: default@t2_n87 -PREHOOK: Input: 
default@t3_n35 -#### A masked pattern was here #### -POSTHOOK: query: select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key -POSTHOOK: type: QUERY -POSTHOOK: Input: default@t1_n148 -POSTHOOK: Input: default@t2_n87 -POSTHOOK: Input: default@t3_n35 -#### A masked pattern was here #### -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -10 -10 -10 -10 -10 -10 -10 -10 -10 -10 -10 -10 -2 -4 -4 -5 -5 -5 -8 -8 -9 -NULL -NULL -NULL PREHOOK: query: explain vectorization only operator select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key PREHOOK: type: QUERY @@ -5702,11 +8475,13 @@ STAGE PLANS: className: VectorFilterOperator native: true Map Join Vectorization: + bigTableKeyExpressions: col 0:int className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false Map Join Vectorization: + bigTableKeyExpressions: col 1:string className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true @@ -5872,6 +8647,7 @@ STAGE PLANS: className: VectorSelectOperator native: true Map Join Vectorization: + bigTableKeyExpressions: col 1:string className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -5934,10 +8710,10 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t2_n87 POSTHOOK: Input: default@t3_n35 #### A masked pattern was here #### -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t2_n87 b on a.key=b.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t2_n87 b on a.key=b.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -5965,12 +8741,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -5981,7 +8755,6 @@ STAGE PLANS: 1 _col0 (type: int) Map Join Vectorization: bigTableKeyExpressions: col 0:int - bigTableValueExpressions: col 0:int, col 1:string className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and 
Supports Key Types IS true @@ -5990,15 +8763,14 @@ STAGE PLANS: input vertices: 1 Map 3 Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -6011,12 +8783,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -6025,12 +8791,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -6039,16 +8803,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -6059,10 +8820,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -6075,27 +8834,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) @@ -6103,7 +8849,6 @@ STAGE 
PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -6138,10 +8883,10 @@ POSTHOOK: Input: default@t2_n87 10 val_10 4 val_4 8 val_8 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t2_n87 a left semi join t1_n148 b on b.key=a.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t2_n87 a left semi join t1_n148 b on b.key=a.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -6169,12 +8914,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -6185,7 +8928,6 @@ STAGE PLANS: 1 _col0 (type: int) Map Join Vectorization: bigTableKeyExpressions: col 0:int - bigTableValueExpressions: col 0:int, col 1:string className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -6194,15 +8936,14 @@ STAGE PLANS: input vertices: 1 Map 3 Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -6215,12 +8956,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -6229,12 +8964,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -6243,16 +8976,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: 
className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -6263,10 +8993,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -6279,27 +9007,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) @@ -6307,7 +9022,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -6344,10 +9058,10 @@ POSTHOOK: Input: default@t2_n87 10 val_5 4 val_2 8 val_4 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t4_n19 b on b.key=a.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t4_n19 b on b.key=a.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -6375,12 +9089,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -6391,7 +9103,6 @@ STAGE PLANS: 1 _col0 (type: int) Map Join Vectorization: bigTableKeyExpressions: col 0:int - bigTableValueExpressions: col 0:int, col 1:string className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -6400,15 +9111,14 @@ STAGE PLANS: input vertices: 1 Map 3 Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + 
HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -6421,12 +9131,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -6435,12 +9139,10 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -6449,16 +9151,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -6469,10 +9168,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -6485,27 +9182,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) @@ -6513,7 +9197,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column 
stats: NONE File Output Operator compressed: false @@ -6542,10 +9225,10 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n148 POSTHOOK: Input: default@t4_n19 #### A masked pattern was here #### -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -6573,12 +9256,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -6589,7 +9270,6 @@ STAGE PLANS: 1 _col1 (type: int) Map Join Vectorization: bigTableKeyExpressions: col 0:int - bigTableValueExpressions: col 1:string className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -6598,23 +9278,21 @@ STAGE PLANS: input vertices: 1 Map 3 Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -6627,12 +9305,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -6641,12 +9313,10 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterLongColLessLongScalar(col 0:int, val 15) predicate: (key < 15) (type: boolean) Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -6655,16 +9325,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - 
projectedOutputColumnNums: [0] Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int, col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col1 (type: int), _col1 (type: int) mode: hash outputColumnNames: _col0, _col1 @@ -6675,10 +9342,8 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -6691,27 +9356,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) @@ -6719,7 +9371,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -6759,10 +9410,10 @@ val_5 val_5 val_8 val_9 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t2_n87 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t2_n87 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -6790,12 +9441,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -6806,7 +9455,6 @@ STAGE PLANS: 1 _col0 (type: int) Map Join Vectorization: bigTableKeyExpressions: col 0:int - bigTableValueExpressions: col 0:int, col 1:string className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -6815,15 +9463,14 
@@ STAGE PLANS: input vertices: 1 Map 3 Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -6836,12 +9483,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -6850,12 +9491,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterExprAndExpr(children: FilterStringGroupColLessStringScalar(col 1:string, val val_10), SelectColumnIsNotNull(col 0:int)) predicate: ((value < 'val_10') and key is not null) (type: boolean) Statistics: Num rows: 3 Data size: 564 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -6864,16 +9503,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 3 Data size: 564 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int, col 1:string native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 @@ -6884,10 +9520,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 3 Data size: 564 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -6900,27 +9534,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: 
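Each converted Map Join Operator in these plans now also records HybridGraceHashJoin: true, i.e. the build side uses the spill-capable hybrid grace hash table rather than a purely in-memory one. A sketch of session settings consistent with these lines, using config names from HiveConf; whether the q-file sets them explicitly or inherits defaults is an assumption here:

    SET hive.auto.convert.join=true;                -- convert common joins to map joins
    SET hive.mapjoin.hybridgrace.hashtable=true;    -- spill-capable hybrid grace hash table
    SET hive.mapjoin.hybridgrace.bloomfilter=true;  -- use a Bloom filter to cut unnecessary spilling

    EXPLAIN VECTORIZATION OPERATOR
    SELECT * FROM t1_n148 a LEFT SEMI JOIN t2_n87 b ON a.key = b.key
    SORT BY a.key, a.value;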
Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) @@ -6928,7 +9549,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -6960,10 +9580,10 @@ POSTHOOK: Input: default@t2_n87 0 val_0 0 val_0 0 val_0 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -6991,12 +9611,10 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterLongColGreaterLongScalar(col 0:int, val 5) predicate: (key > 5) (type: boolean) Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -7005,16 +9623,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -7025,10 +9640,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -7041,12 +9654,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 2 Map Operator Tree: TableScan @@ -7055,12 +9662,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -7071,7 +9676,6 @@ STAGE PLANS: 1 _col0 (type: int) Map Join Vectorization: bigTableKeyExpressions: col 0:int - bigTableValueExpressions: col 1:string className: VectorMapJoinOperator native: false nativeConditionsMet: 
hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -7080,23 +9684,21 @@ STAGE PLANS: input vertices: 1 Map 1 Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -7109,27 +9711,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) @@ -7137,7 +9726,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -7169,10 +9757,10 @@ POSTHOOK: Input: default@t3_n35 val_10 val_8 val_9 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -7200,12 +9788,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterExprAndExpr(children: FilterLongColGreaterLongScalar(col 0:int, val 5), FilterStringGroupColLessEqualStringScalar(col 1:string, val val_20)) predicate: ((key > 5) and (value <= 'val_20')) (type: boolean) Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE 
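Note how the subquery predicate (key > 5) and the extra join condition (value <= 'val_20') are pushed into a single vectorized Filter Operator on the small side just above, shrinking the build input to an estimated 1 row before deduplication and broadcast. The query under test, for reference:

    EXPLAIN VECTORIZATION OPERATOR
    SELECT a.value
    FROM t1_n148 a
    LEFT SEMI JOIN (SELECT key, value FROM t2_n87 WHERE key > 5) b
      ON a.key = b.key AND b.value <= 'val_20'
    SORT BY a.value;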
Select Operator @@ -7214,16 +9800,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int, col 1:string native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 @@ -7234,10 +9817,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -7250,12 +9831,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 2 Map Operator Tree: TableScan @@ -7264,12 +9839,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -7280,7 +9853,6 @@ STAGE PLANS: 1 _col0 (type: int) Map Join Vectorization: bigTableKeyExpressions: col 0:int - bigTableValueExpressions: col 1:string className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -7289,23 +9861,21 @@ STAGE PLANS: input vertices: 1 Map 1 Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -7318,27 +9888,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: 
[0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) @@ -7346,7 +9903,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -7375,10 +9931,10 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n148 POSTHOOK: Input: default@t2_n87 #### A masked pattern was here #### -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -7406,12 +9962,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterLongColGreaterLongScalar(col 0:int, val 2) predicate: (key > 2) (type: boolean) Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -7420,16 +9974,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -7440,10 +9991,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -7456,12 +10005,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 2 Map Operator Tree: TableScan @@ -7470,12 +10013,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan 
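Throughout these plans the small side of a LEFT SEMI JOIN is run through a hash Group By on the join key before the broadcast. Since a semi join only tests existence, deduplicating the build side is safe and keeps the hash table minimal; the effect matches this hand-written rewrite of the case above (an illustration of the semantics, not the planner's literal output):

    SELECT a.key, a.value
    FROM t2_n87 a
    JOIN (SELECT DISTINCT key FROM t1_n148 WHERE key > 2) b
      ON a.key = b.key
    SORT BY a.key, a.value;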
Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -7486,7 +10027,6 @@ STAGE PLANS: 1 _col0 (type: int) Map Join Vectorization: bigTableKeyExpressions: col 0:int - bigTableValueExpressions: col 0:int, col 1:string className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -7495,15 +10035,14 @@ STAGE PLANS: input vertices: 1 Map 1 Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -7516,27 +10055,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) @@ -7544,7 +10070,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -7578,10 +10103,10 @@ POSTHOOK: Input: default@t2_n87 10 val_5 4 val_2 8 val_4 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -7609,12 +10134,10 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: 
className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -7625,7 +10148,6 @@ STAGE PLANS: 1 _col0 (type: int) Map Join Vectorization: bigTableKeyExpressions: col 0:int - bigTableValueExpressions: col 0:int className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -7634,15 +10156,14 @@ STAGE PLANS: input vertices: 1 Map 3 Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -7655,12 +10176,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -7669,12 +10184,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -7683,16 +10196,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -7703,10 +10213,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -7719,27 +10227,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - 
scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -7747,7 +10242,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -7795,10 +10289,10 @@ POSTHOOK: Input: default@t3_n35 8 8 9 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -7826,12 +10320,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -7842,7 +10334,6 @@ STAGE PLANS: 1 (2 * _col0) (type: int) Map Join Vectorization: bigTableKeyExpressions: col 0:int - bigTableValueExpressions: col 0:int, col 1:string className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -7851,15 +10342,14 @@ STAGE PLANS: input vertices: 1 Map 3 Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -7872,12 +10362,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -7886,12 +10370,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: 
[0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 3:int)(children: LongScalarMultiplyLongColumn(val 2, col 0:int) -> 3:int) predicate: (2 * key) is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -7900,16 +10382,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -7920,11 +10399,8 @@ STAGE PLANS: Map-reduce partition columns: (2 * _col0) (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [1] - keyExpressions: LongScalarMultiplyLongColumn(val 2, col 0:int) -> 1:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -7937,27 +10413,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) @@ -7965,7 +10428,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -7998,10 +10460,10 @@ POSTHOOK: Input: default@t2_n87 0 val_0 0 val_0 8 val_8 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -8029,53 +10491,62 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - 
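When the join key is an expression, as in a.key = 2*b.key here, the small side partitions and sorts on the computed value (2 * _col0). At DETAIL level this surfaced as a scratch bigint column fed by LongScalarMultiplyLongColumn, which is exactly the kind of line the OPERATOR level omits. The query under test:

    EXPLAIN VECTORIZATION OPERATOR
    SELECT *
    FROM t1_n148 a LEFT SEMI JOIN t2_n87 b ON a.key = 2 * b.key
    SORT BY a.key, a.value;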
predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 - Left Semi Join 1 to 2 keys: 0 key (type: int) 1 key (type: int) - 2 _col0 (type: int) Map Join Vectorization: bigTableKeyExpressions: col 0:int - bigTableValueExpressions: col 0:int, col 1:string className: VectorMapJoinOperator native: false - nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false, One MapJoin Condition IS false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false outputColumnNames: _col0, _col1, _col5, _col6 input vertices: 1 Map 3 - 2 Map 4 - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: int), _col1 (type: string), _col5 (type: int), _col6 (type: string) - outputColumnNames: _col0, _col1, _col2, _col3 - Select Vectorization: - className: VectorSelectOperator - native: true - projectedOutputColumnNums: [0, 1, 2, 3] - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int), _col1 (type: string) - sort order: ++ - Reduce Sink Vectorization: - className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 _col5 (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + bigTableKeyExpressions: col 2:int + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false + outputColumnNames: _col0, _col1, _col5, _col6 + input vertices: + 1 Map 4 + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Select Operator + expressions: _col0 (type: int), _col1 (type: string), _col5 (type: int), _col6 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2, 3] - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE - value expressions: _col2 (type: int), _col3 (type: string) + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int), _col1 (type: string) + sort 
order: ++ + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + value expressions: _col2 (type: int), _col3 (type: string) Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -8087,12 +10558,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [string] Map 3 Map Operator Tree: TableScan @@ -8101,12 +10566,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator @@ -8115,10 +10578,8 @@ STAGE PLANS: Map-reduce partition columns: key (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) Execution mode: vectorized, llap @@ -8132,12 +10593,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 4 Map Operator Tree: TableScan @@ -8146,12 +10601,10 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -8160,16 +10613,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -8180,10 +10630,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, 
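Beyond the EXPLAIN-level change, this hunk records a real plan change: the single three-input Map Join Operator (Inner Join 0 to 1 plus Left Semi Join 1 to 2) is split into two cascaded map joins with one condition each. That flips the One MapJoin Condition check from unmet to met, leaving hive.vectorized.execution.mapjoin.native.enabled as the only unmet native gate, and the row estimates shrink accordingly (46 -> 23). A sketch of probing the remaining gate in a session; it is an assumption here that the q-file deliberately leaves the flag off to exercise the non-native VectorMapJoinOperator:

    -- With the cascade in place, each map join has exactly one condition,
    -- so enabling this flag should let both go fully native.
    SET hive.vectorized.execution.mapjoin.native.enabled=true;

    EXPLAIN VECTORIZATION OPERATOR
    SELECT *
    FROM t1_n148 a
    JOIN t2_n87 b ON a.key = b.key
    LEFT SEMI JOIN t3_n35 c ON b.key = c.key
    SORT BY a.key, a.value;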
hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -8196,27 +10644,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 4 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string, VALUE._col0:int, VALUE._col1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), VALUE._col0 (type: int), VALUE._col1 (type: string) @@ -8224,14 +10659,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1, 2, 3] - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -8269,10 +10703,10 @@ POSTHOOK: Input: default@t3_n35 10 val_10 10 val_5 4 val_4 4 val_2 8 val_8 8 val_4 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -8300,12 +10734,10 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterExprAndExpr(children: SelectColumnIsNotNull(col 0:int), SelectColumnIsNotNull(col 1:string)) predicate: (key is not null and value is not null) (type: boolean) Statistics: Num rows: 20 Data size: 3760 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -8316,7 +10748,6 @@ STAGE PLANS: 1 _col0 (type: int), _col1 (type: string) Map Join Vectorization: bigTableKeyExpressions: col 0:int, col 1:string - bigTableValueExpressions: col 0:int, col 1:string className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One 
MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -8325,15 +10756,14 @@ STAGE PLANS: input vertices: 1 Map 3 Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -8346,12 +10776,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -8360,12 +10784,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterExprAndExpr(children: SelectColumnIsNotNull(col 0:int), SelectColumnIsNotNull(col 1:string)) predicate: (key is not null and value is not null) (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -8374,16 +10796,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int, col 1:string native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 @@ -8394,10 +10813,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -8410,27 +10827,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - 
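The reduce sink class tracks the key shape: a lone int key selects VectorReduceSinkLongOperator, while the composite (int, string) key of this two-column semi join selects VectorReduceSinkMultiKeyOperator, as seen above. The query under test:

    EXPLAIN VECTORIZATION OPERATOR
    SELECT *
    FROM t3_n35 a
    LEFT SEMI JOIN t1_n148 b ON a.key = b.key AND a.value = b.value
    SORT BY a.key, a.value;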
dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) @@ -8438,7 +10842,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -8481,10 +10884,10 @@ POSTHOOK: Input: default@t3_n35 5 val_5 8 val_8 9 val_9 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -8512,44 +10915,54 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 - Left Semi Join 0 to 2 keys: 0 key (type: int) 1 _col0 (type: int) - 2 _col0 (type: int) Map Join Vectorization: bigTableKeyExpressions: col 0:int - bigTableValueExpressions: col 0:int className: VectorMapJoinOperator native: false - nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false, One MapJoin Condition IS false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false outputColumnNames: _col0 input vertices: 1 Map 3 - 2 Map 4 - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Reduce Sink Vectorization: - className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 _col0 (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + bigTableKeyExpressions: 
col 0:int + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false + outputColumnNames: _col0 + input vertices: + 1 Map 4 + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -8561,12 +10974,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -8575,12 +10982,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -8589,16 +10994,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -8609,10 +11011,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -8622,15 +11022,235 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Map 4 + Map Operator Tree: + TableScan + alias: c + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Filter Operator + Filter Vectorization: + className: 
VectorFilterOperator + native: true + predicate: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Group By Operator + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n148 +PREHOOK: Input: default@t2_n87 +PREHOOK: Input: default@t3_n35 +#### A masked pattern was here #### +POSTHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n148 +POSTHOOK: Input: default@t2_n87 +POSTHOOK: Input: default@t3_n35 +#### A masked pattern was here #### +0 +0 +0 +0 +0 +0 +10 +10 +10 +10 +4 +4 +8 +8 +PREHOOK: query: explain vectorization operator +select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key +PREHOOK: type: QUERY 
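The appended golden output that follows covers a new shape: a LEFT OUTER JOIN whose null-extended column (_col5, i.e. b.key) then feeds a LEFT SEMI JOIN. The plan interposes a "_col5 is not null" Filter Operator between the two map joins, which is sound because a NULL semi-join key can never satisfy the equality. The added test, for reference:

    EXPLAIN VECTORIZATION OPERATOR
    SELECT a.key
    FROM t3_n35 a
    LEFT OUTER JOIN t1_n148 b ON a.key = b.key
    LEFT SEMI JOIN t2_n87 c ON b.key = c.key
    SORT BY a.key;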
+POSTHOOK: query: explain vectorization operator +select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Map Join Operator + condition map: + Left Outer Join 0 to 1 + keys: + 0 key (type: int) + 1 key (type: int) + Map Join Vectorization: + bigTableKeyExpressions: col 0:int + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false + outputColumnNames: _col0, _col5 + input vertices: + 1 Map 3 + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicate: _col5 is not null (type: boolean) + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 _col5 (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + bigTableKeyExpressions: col 1:int + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false + outputColumnNames: _col0 + input vertices: + 1 Map 4 + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Map 3 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: 
true + Reduce Output Operator + key expressions: key (type: int) + sort order: + + Map-reduce partition columns: key (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 4 Map Operator Tree: TableScan @@ -8639,12 +11259,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -8653,16 +11271,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -8673,10 +11288,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -8689,27 +11302,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -8717,14 +11317,13 @@ STAGE PLANS: Select Vectorization: className: 
VectorSelectOperator native: true - projectedOutputColumnNums: [0] - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -8736,13 +11335,13 @@ STAGE PLANS: Processor Tree: ListSink -PREHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key +PREHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key PREHOOK: type: QUERY PREHOOK: Input: default@t1_n148 PREHOOK: Input: default@t2_n87 PREHOOK: Input: default@t3_n35 #### A masked pattern was here #### -POSTHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key +POSTHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n148 POSTHOOK: Input: default@t2_n87 @@ -8754,6 +11353,18 @@ POSTHOOK: Input: default@t3_n35 0 0 0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 10 10 10 @@ -8762,11 +11373,11 @@ POSTHOOK: Input: default@t3_n35 4 8 8 -PREHOOK: query: explain vectorization detail -select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key +PREHOOK: query: explain vectorization operator +select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail -select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key +POSTHOOK: query: explain vectorization operator +select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: enabled: true @@ -8781,48 +11392,26 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE) - Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (BROADCAST_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: Map 1 Map Operator Tree: TableScan alias: a - Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Map Join Operator - condition map: - Left Outer Join 0 to 1 - Left Semi Join 1 to 2 - keys: - 0 key (type: int) - 1 key (type: int) - 2 _col0 (type: int) - Map Join Vectorization: - bigTableKeyExpressions: col 0:int - bigTableValueExpressions: col 0:int - className: VectorMapJoinOperator - native: false - nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, 
hive.execution.engine tez IN [tez, spark] IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true - nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false, One MapJoin Condition IS false - outputColumnNames: _col0 - input vertices: - 1 Map 3 - 2 Map 4 - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Reduce Sink Vectorization: - className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: key (type: int) + sort order: + + Map-reduce partition columns: key (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -8831,34 +11420,25 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] - Map 3 + Map 4 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] - Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -8870,51 +11450,46 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] - Map 4 + Map 5 Map Operator Tree: TableScan alias: c + filterExpr: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: 
true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Select Operator - expressions: key (type: int) - outputColumnNames: _col0 - Select Vectorization: - className: VectorSelectOperator + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - projectedOutputColumnNums: [0] + predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Group By Operator - Group By Vectorization: - className: VectorGroupByOperator - groupByMode: HASH - keyExpressions: col 0:int - native: false - vectorProcessingMode: HASH - projectedOutputColumnNums: [] - keys: _col0 (type: int) - mode: hash + Select Operator + expressions: key (type: int) outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Map-reduce partition columns: _col0 (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] + Group By Operator + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -8926,27 +11501,46 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 key (type: int) + 1 key (type: int) + outputColumnNames: _col0, _col5 + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: _col5 is not null (type: boolean) + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 _col5 (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0 + input vertices: + 1 Map 5 + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + MergeJoin Vectorization: + enabled: false + 
enableConditionsNotMet: Vectorizing MergeJoin Supported IS false + Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -8954,14 +11548,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -8973,13 +11566,13 @@ STAGE PLANS: Processor Tree: ListSink -PREHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key +PREHOOK: query: select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key PREHOOK: type: QUERY PREHOOK: Input: default@t1_n148 PREHOOK: Input: default@t2_n87 PREHOOK: Input: default@t3_n35 #### A masked pattern was here #### -POSTHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key +POSTHOOK: query: select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n148 POSTHOOK: Input: default@t2_n87 @@ -9011,10 +11604,13 @@ POSTHOOK: Input: default@t3_n35 4 8 8 -PREHOOK: query: explain vectorization detail +NULL +NULL +NULL +PREHOOK: query: explain vectorization operator select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -9030,7 +11626,7 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (BROADCAST_EDGE) Reducer 3 <- Reducer 2 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: @@ -9041,17 +11637,14 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true 
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -9064,12 +11657,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 4 Map Operator Tree: TableScan @@ -9077,17 +11664,14 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -9100,51 +11684,46 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 5 Map Operator Tree: TableScan alias: c + filterExpr: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Select Operator - expressions: key (type: int) - outputColumnNames: _col0 - Select Vectorization: - className: VectorSelectOperator + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - projectedOutputColumnNums: [0] + predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Group By Operator - Group By Vectorization: - className: VectorGroupByOperator - groupByMode: HASH - keyExpressions: col 0:int - native: false - vectorProcessingMode: HASH - projectedOutputColumnNums: [] - keys: _col0 (type: int) - mode: hash + Select Operator + expressions: key (type: int) outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Map-reduce partition columns: _col0 (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] + Group By Operator + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH 
+ native: false + vectorProcessingMode: HASH + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -9156,44 +11735,46 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: llap Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 - Left Semi Join 1 to 2 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) - 2 _col0 (type: int) - outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + outputColumnNames: _col0, _col5 + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: _col5 is not null (type: boolean) + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 _col5 (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0 + input vertices: + 1 Map 5 + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -9201,14 +11782,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE table: input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9261,10 +11841,10 @@ POSTHOOK: Input: default@t3_n35 NULL NULL NULL -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -9280,29 +11860,66 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) - Reducer 3 <- Reducer 2 (SIMPLE_EDGE) + Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: Map 1 Map Operator Tree: TableScan alias: a + filterExpr: key is not null (type: boolean) Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Reduce Output Operator - key expressions: key (type: int) - sort order: + - Map-reduce partition columns: key (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] - Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE + predicate: key is not null (type: boolean) + Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 key (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + bigTableKeyExpressions: col 0:int + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false + outputColumnNames: _col0 + input vertices: + 1 Map 3 + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Map Join Operator + condition map: + Left Outer Join 0 to 1 + keys: + 0 _col0 (type: int) + 1 key (type: int) + Map Join Vectorization: + bigTableKeyExpressions: col 0:int + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false + outputColumnNames: _col0 + input vertices: + 1 Map 4 + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE 
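Every semi-join build side in these plans has the same shape: a Select projects the join key, a hash-mode Group By de-duplicates it, and the result feeds the join, so each probe row is emitted at most once no matter how many build rows match. A semantically equivalent rewrite of the semi join, assuming the same tables as the queries above:

    -- LEFT SEMI JOIN as planned above: an inner join against the
    -- de-duplicated build-side keys (the hash Group By in the plan)
    SELECT a.key
    FROM t3_n35 a
    JOIN (SELECT DISTINCT key FROM t2_n87) b ON a.key = b.key
    SORT BY a.key;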
+ HybridGraceHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -9311,54 +11928,49 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: true + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] - Map 4 + Map 3 Map Operator Tree: TableScan alias: b + filterExpr: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Select Operator - expressions: key (type: int) - outputColumnNames: _col0 - Select Vectorization: - className: VectorSelectOperator + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - projectedOutputColumnNums: [0] + predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Group By Operator - Group By Vectorization: - className: VectorGroupByOperator - groupByMode: HASH - keyExpressions: col 0:int - native: false - vectorProcessingMode: HASH - projectedOutputColumnNums: [] - keys: _col0 (type: int) - mode: hash + Select Operator + expressions: key (type: int) outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Map-reduce partition columns: _col0 (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] + Group By Operator + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, 
llap LLAP IO: all inputs Map Vectorization: @@ -9370,30 +11982,21 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] - Map 5 + Map 4 Map Operator Tree: TableScan alias: c Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -9406,44 +12009,14 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 - Execution mode: llap - Reduce Operator Tree: - Merge Join Operator - condition map: - Left Semi Join 0 to 1 - Left Outer Join 0 to 2 - keys: - 0 key (type: int) - 1 _col0 (type: int) - 2 key (type: int) - outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE - Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -9451,14 +12024,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9511,11 +12083,11 @@ POSTHOOK: Input: default@t3_n35 4 8 8 -PREHOOK: query: explain vectorization detail -select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key +PREHOOK: query: explain vectorization operator +select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on 
a.key = c.key sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail -select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key +POSTHOOK: query: explain vectorization operator +select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: enabled: true @@ -9530,7 +12102,8 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) + Map 1 <- Map 4 (BROADCAST_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) Reducer 3 <- Reducer 2 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: @@ -9538,21 +12111,42 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a + filterExpr: key is not null (type: boolean) Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Reduce Output Operator - key expressions: key (type: int) - sort order: + - Map-reduce partition columns: key (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] - Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE + predicate: key is not null (type: boolean) + Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 key (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + bigTableKeyExpressions: col 0:int + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false + outputColumnNames: _col0 + input vertices: + 1 Map 4 + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -9561,54 +12155,49 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: true + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: 
[0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 4 Map Operator Tree: TableScan alias: b + filterExpr: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Select Operator - expressions: key (type: int) - outputColumnNames: _col0 - Select Vectorization: - className: VectorSelectOperator + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - projectedOutputColumnNums: [0] + predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Group By Operator - Group By Vectorization: - className: VectorGroupByOperator - groupByMode: HASH - keyExpressions: col 0:int - native: false - vectorProcessingMode: HASH - projectedOutputColumnNums: [] - keys: _col0 (type: int) - mode: hash + Select Operator + expressions: key (type: int) outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Map-reduce partition columns: _col0 (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] + Group By Operator + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -9620,12 +12209,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 5 Map Operator Tree: TableScan @@ -9633,17 +12216,14 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No 
DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -9656,44 +12236,32 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: llap Reduce Operator Tree: Merge Join Operator condition map: - Left Semi Join 0 to 1 - Right Outer Join 0 to 2 + Full Outer Join 0 to 1 keys: - 0 key (type: int) - 1 _col0 (type: int) - 2 key (type: int) + 0 _col0 (type: int) + 1 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -9701,14 +12269,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9720,13 +12287,13 @@ STAGE PLANS: Processor Tree: ListSink -PREHOOK: query: select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key +PREHOOK: query: select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key PREHOOK: type: QUERY PREHOOK: Input: default@t1_n148 PREHOOK: Input: default@t2_n87 PREHOOK: Input: default@t3_n35 #### A masked pattern was here #### -POSTHOOK: query: select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key +POSTHOOK: query: select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n148 POSTHOOK: Input: default@t2_n87 @@ -9754,19 +12321,30 @@ POSTHOOK: Input: default@t3_n35 10 10 10 +10 +10 +10 +10 +10 +10 +10 +10 +2 4 4 
+5 +5 +5 8 8 +9 NULL NULL NULL -NULL -NULL -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -9782,7 +12360,8 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) + Map 1 <- Map 4 (BROADCAST_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) Reducer 3 <- Reducer 2 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: @@ -9790,21 +12369,42 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a + filterExpr: key is not null (type: boolean) Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Reduce Output Operator - key expressions: key (type: int) - sort order: + - Map-reduce partition columns: key (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] - Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE + predicate: key is not null (type: boolean) + Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 key (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + bigTableKeyExpressions: col 0:int + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false + outputColumnNames: _col0 + input vertices: + 1 Map 4 + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -9813,54 +12413,49 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: true + allNative: false 
usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 4 Map Operator Tree: TableScan alias: b + filterExpr: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Select Operator - expressions: key (type: int) - outputColumnNames: _col0 - Select Vectorization: - className: VectorSelectOperator + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - projectedOutputColumnNums: [0] + predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Group By Operator - Group By Vectorization: - className: VectorGroupByOperator - groupByMode: HASH - keyExpressions: col 0:int - native: false - vectorProcessingMode: HASH - projectedOutputColumnNums: [] - keys: _col0 (type: int) - mode: hash + Select Operator + expressions: key (type: int) outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Map-reduce partition columns: _col0 (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] + Group By Operator + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -9872,12 +12467,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 5 Map Operator Tree: TableScan @@ -9885,17 +12474,14 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: 
hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -9908,44 +12494,32 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: llap Reduce Operator Tree: Merge Join Operator condition map: - Left Semi Join 0 to 1 - Outer Join 0 to 2 + Full Outer Join 0 to 1 keys: - 0 key (type: int) - 1 _col0 (type: int) - 2 key (type: int) + 0 _col0 (type: int) + 1 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -9953,14 +12527,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -10026,10 +12599,10 @@ POSTHOOK: Input: default@t3_n35 NULL NULL NULL -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -10057,12 +12630,10 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter 
Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 21 Data size: 3948 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -10073,7 +12644,6 @@ STAGE PLANS: 1 _col0 (type: int) Map Join Vectorization: bigTableKeyExpressions: col 0:int - bigTableValueExpressions: col 0:int, col 1:string className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -10082,6 +12652,7 @@ STAGE PLANS: input vertices: 1 Map 3 Statistics: Num rows: 23 Data size: 4342 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Map Join Operator condition map: Left Outer Join 0 to 1 @@ -10090,7 +12661,6 @@ STAGE PLANS: 1 value (type: string) Map Join Vectorization: bigTableKeyExpressions: col 1:string - bigTableValueExpressions: col 0:int className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true @@ -10099,15 +12669,14 @@ STAGE PLANS: input vertices: 1 Map 4 Statistics: Num rows: 25 Data size: 4776 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 25 Data size: 4776 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -10120,12 +12689,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -10134,12 +12697,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -10148,16 +12709,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -10168,10 +12726,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) 
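(A note on the recurring query change in these golden files: "explain vectorization detail" has been switched to "explain vectorization operator", which is why the detail-only fields — vectorizationSchemaColumns, predicateExpression, projectedOutputColumnNums, keyColumnNums/valueColumnNums, rowBatchContext, reduceColumnNullOrder/reduceColumnSortOrder — drop out of the expected output. Hive's EXPLAIN VECTORIZATION accepts the levels SUMMARY, OPERATOR, EXPRESSION and DETAIL; a minimal sketch using one of the queries from this file:

    -- operator level: per-operator vectorization class and native conditions only
    explain vectorization operator
    select a.key from t3_n35 a
    left semi join t2_n87 b on a.key = b.key
    left outer join t1_n148 c on a.value = c.value
    sort by a.key;

    -- detail level: additionally prints schema columns, vector expressions,
    -- and the row-batch context seen in the removed lines above
    explain vectorization detail
    select a.key from t3_n35 a
    left semi join t2_n87 b on a.key = b.key
    left outer join t1_n148 c on a.value = c.value
    sort by a.key;
)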
Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -10184,12 +12740,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 4 Map Operator Tree: TableScan @@ -10197,17 +12747,14 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Reduce Output Operator key expressions: value (type: string) sort order: + Map-reduce partition columns: value (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -10220,27 +12767,14 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -10248,7 +12782,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 25 Data size: 4776 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -10314,10 +12847,10 @@ POSTHOOK: Input: default@t3_n35 4 8 8 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100 PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100 POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -10344,12 +12877,10 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: 
FilterExprAndExpr(children: FilterLongColGreaterLongScalar(col 0:int, val 100), SelectColumnIsNotNull(col 1:string)) predicate: ((key > 100) and value is not null) (type: boolean) Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -10358,7 +12889,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: @@ -10368,7 +12898,6 @@ STAGE PLANS: 1 _col0 (type: string) Map Join Vectorization: bigTableKeyExpressions: col 1:string - bigTableValueExpressions: col 0:int className: VectorMapJoinOperator native: false nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true @@ -10377,6 +12906,7 @@ STAGE PLANS: input vertices: 1 Map 2 Statistics: Num rows: 12 Data size: 2226 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true File Output Operator compressed: false File Sink Vectorization: @@ -10398,12 +12928,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 2 Map Operator Tree: TableScan @@ -10412,12 +12936,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 1:string) predicate: value is not null (type: boolean) Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -10426,16 +12948,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [1] Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 1:string native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: string) mode: hash outputColumnNames: _col0 @@ -10446,10 +12965,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -10462,12 +12979,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Stage: Stage-0 Fetch Operator @@ -10485,10 +12996,10 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t2_n87 POSTHOOK: Input: default@t3_n35 #### A masked pattern was here #### -PREHOOK: 
query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t2_n87 b on a.key=b.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t2_n87 b on a.key=b.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -10516,12 +13027,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -10531,13 +13040,9 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0, 1] className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1] outputColumnNames: _col0, _col1 input vertices: 1 Map 3 @@ -10547,10 +13052,8 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -10563,12 +13066,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -10577,12 +13074,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -10591,16 +13086,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -10611,10 +13103,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: 
className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -10627,27 +13117,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) @@ -10655,7 +13132,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -10690,10 +13166,10 @@ POSTHOOK: Input: default@t2_n87 10 val_10 4 val_4 8 val_8 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t2_n87 a left semi join t1_n148 b on b.key=a.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t2_n87 a left semi join t1_n148 b on b.key=a.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -10721,12 +13197,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -10736,13 +13210,9 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0, 1] className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1] outputColumnNames: _col0, _col1 input vertices: 1 Map 3 @@ -10752,10 +13222,8 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, 
spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -10768,12 +13236,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -10782,12 +13244,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -10796,16 +13256,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -10816,10 +13273,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -10832,27 +13287,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) @@ -10860,7 +13302,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -10897,10 +13338,10 @@ POSTHOOK: Input: default@t2_n87 10 val_5 4 val_2 8 val_4 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t4_n19 b on b.key=a.key sort by a.key, a.value PREHOOK: type: 
QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t4_n19 b on b.key=a.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -10928,12 +13369,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -10943,13 +13382,9 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0, 1] className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1] outputColumnNames: _col0, _col1 input vertices: 1 Map 3 @@ -10959,10 +13394,8 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -10975,12 +13408,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -10989,12 +13416,10 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -11003,16 +13428,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -11023,10 +13445,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS 
true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -11039,27 +13459,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) @@ -11067,7 +13474,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -11096,10 +13502,10 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n148 POSTHOOK: Input: default@t4_n19 #### A masked pattern was here #### -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -11127,12 +13533,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -11142,13 +13546,9 @@ STAGE PLANS: 0 key (type: int) 1 _col1 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [1] - bigTableValueColumnNums: [1] className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [1] outputColumnNames: _col1 input vertices: 1 Map 3 @@ -11159,17 +13559,14 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [1] Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1] 
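(Context for the semi-join plans in this file: Hive's LEFT SEMI JOIN returns only columns of the left table and emits each left row at most once. That is why every plan above projects just the join key on the small side and inserts a hash-mode Group By — producing distinct keys — ahead of the VectorMapJoinLeftSemiLongOperator. A roughly equivalent rewrite, shown for illustration with the tables this test uses:

    -- the left-semi-join form exercised by the test
    select * from t1_n148 a
    left semi join t4_n19 b on b.key = a.key
    sort by a.key, a.value;

    -- an equivalent IN-subquery form, which recent Hive versions
    -- rewrite internally to the same left semi join
    select a.key, a.value from t1_n148 a
    where a.key in (select b.key from t4_n19 b)
    sort by a.key, a.value;
)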
native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -11182,12 +13579,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -11196,12 +13587,10 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterLongColLessLongScalar(col 0:int, val 15) predicate: (key < 15) (type: boolean) Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -11210,16 +13599,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int, col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col1 (type: int), _col1 (type: int) mode: hash outputColumnNames: _col0, _col1 @@ -11230,10 +13616,8 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -11246,27 +13630,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) @@ -11274,7 +13645,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -11314,10 +13684,10 @@ val_5 val_5 val_8 val_9 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t1_n148 a 
left semi join t2_n87 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t2_n87 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -11345,12 +13715,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -11360,13 +13728,9 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0, 1] className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1] outputColumnNames: _col0, _col1 input vertices: 1 Map 3 @@ -11376,10 +13740,8 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -11392,12 +13754,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -11406,12 +13762,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterExprAndExpr(children: FilterStringGroupColLessStringScalar(col 1:string, val val_10), SelectColumnIsNotNull(col 0:int)) predicate: ((value < 'val_10') and key is not null) (type: boolean) Statistics: Num rows: 3 Data size: 564 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -11420,16 +13774,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 3 Data size: 564 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int, col 1:string native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 @@ -11440,10 
+13791,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 3 Data size: 564 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -11456,27 +13805,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) @@ -11484,7 +13820,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -11516,10 +13851,10 @@ POSTHOOK: Input: default@t2_n87 0 val_0 0 val_0 0 val_0 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -11547,12 +13882,10 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterLongColGreaterLongScalar(col 0:int, val 5) predicate: (key > 5) (type: boolean) Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -11561,16 +13894,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -11581,10 +13911,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN 
[tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -11597,12 +13925,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 2 Map Operator Tree: TableScan @@ -11611,12 +13933,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -11626,13 +13946,9 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [1] - bigTableValueColumnNums: [1] className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [1] outputColumnNames: _col1 input vertices: 1 Map 1 @@ -11643,17 +13959,14 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -11666,27 +13979,14 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) @@ -11694,7 +13994,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column 
stats: NONE File Output Operator compressed: false @@ -11726,10 +14025,10 @@ POSTHOOK: Input: default@t3_n35 val_10 val_8 val_9 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -11757,12 +14056,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterExprAndExpr(children: FilterLongColGreaterLongScalar(col 0:int, val 5), FilterStringGroupColLessEqualStringScalar(col 1:string, val val_20)) predicate: ((key > 5) and (value <= 'val_20')) (type: boolean) Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -11771,16 +14068,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int, col 1:string native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 @@ -11791,10 +14085,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -11807,12 +14099,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 2 Map Operator Tree: TableScan @@ -11821,12 +14107,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -11836,13 +14120,9 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [1] - bigTableValueColumnNums: [1] className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: 
hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [1] outputColumnNames: _col1 input vertices: 1 Map 1 @@ -11853,17 +14133,14 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -11876,27 +14153,14 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) @@ -11904,7 +14168,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -11933,10 +14196,10 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n148 POSTHOOK: Input: default@t2_n87 #### A masked pattern was here #### -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -11964,12 +14227,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterLongColGreaterLongScalar(col 0:int, val 2) predicate: (key > 2) (type: boolean) Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -11978,16 +14239,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] 
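(The reduce-sink class names in these plans are specialized by key type: VectorReduceSinkLongOperator for a single long/int key, VectorReduceSinkStringOperator for a single string key, and VectorReduceSinkObjectHashOperator for multi-column or expression keys, all gated by the conditions listed under nativeConditionsMet. A hedged sketch of how to observe the fallback; the session setting below is named in those conditions, but the effect of flipping it is an assumption, not something this diff shows:

    -- disabling the switch named in the native conditions above should make
    -- the plan fall back to a generic, non-specialized reduce sink
    set hive.vectorized.execution.reducesink.new.enabled=false;
    explain vectorization operator
    select a.value from t1_n148 a
    left semi join (select key, value from t2_n87 where key > 5) b
      on a.key = b.key and b.value <= 'val_20'
    sort by a.value;
)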
Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -11998,10 +14256,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -12014,12 +14270,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 2 Map Operator Tree: TableScan @@ -12028,12 +14278,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -12043,13 +14291,9 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0, 1] className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1] outputColumnNames: _col0, _col1 input vertices: 1 Map 1 @@ -12059,10 +14303,8 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -12075,27 +14317,14 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - 
dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) @@ -12103,7 +14332,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -12137,10 +14365,10 @@ POSTHOOK: Input: default@t2_n87 10 val_5 4 val_2 8 val_4 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -12168,12 +14396,10 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -12183,13 +14409,9 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0] - bigTableValueColumnNums: [0] className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0] outputColumnNames: _col0 input vertices: 1 Map 3 @@ -12199,10 +14421,8 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -12215,12 +14435,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -12229,12 +14443,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE 
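(The query just above uses an explicit /*+ mapjoin(b) */ hint to request a map join for the semi join's small side. For the hint to be honored, hive.ignore.mapjoin.hint has to be turned off; that setting is an assumption about the surrounding test harness, not something this diff shows:

    set hive.ignore.mapjoin.hint=false;   -- assumed; mapjoin hints are ignored by default
    explain vectorization operator
    select /*+ mapjoin(b) */ a.key
    from t3_n35 a
    left semi join t1_n148 b on a.key = b.key
    sort by a.key;
)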
Select Operator @@ -12243,16 +14455,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -12263,10 +14472,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -12279,27 +14486,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -12307,7 +14501,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -12355,10 +14548,10 @@ POSTHOOK: Input: default@t3_n35 8 8 9 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -12386,12 +14579,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -12401,13 +14592,9 @@ STAGE PLANS: 0 key (type: int) 1 (2 * _col0) (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0, 1] className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN 
[tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1] outputColumnNames: _col0, _col1 input vertices: 1 Map 3 @@ -12417,10 +14604,8 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -12433,12 +14618,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -12447,12 +14626,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 3:int)(children: LongScalarMultiplyLongColumn(val 2, col 0:int) -> 3:int) predicate: (2 * key) is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -12461,16 +14638,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -12481,11 +14655,8 @@ STAGE PLANS: Map-reduce partition columns: (2 * _col0) (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [1] - keyExpressions: LongScalarMultiplyLongColumn(val 2, col 0:int) -> 1:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -12498,27 +14669,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string - partitionColumnCount: 0 - 
scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) @@ -12526,7 +14684,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -12559,10 +14716,10 @@ POSTHOOK: Input: default@t2_n87 0 val_0 0 val_0 8 val_8 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -12590,53 +14747,56 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 - Left Semi Join 1 to 2 keys: 0 key (type: int) 1 key (type: int) - 2 _col0 (type: int) Map Join Vectorization: - bigTableKeyExpressions: col 0:int - bigTableValueExpressions: col 0:int, col 1:string - className: VectorMapJoinOperator - native: false - nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - nativeConditionsNotMet: One MapJoin Condition IS false + className: VectorMapJoinInnerLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true outputColumnNames: _col0, _col1, _col5, _col6 input vertices: 1 Map 3 - 2 Map 4 - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: int), _col1 (type: string), _col5 (type: int), _col6 (type: string) - outputColumnNames: _col0, _col1, _col2, _col3 - Select Vectorization: - className: VectorSelectOperator + Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 _col5 (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + className: VectorMapJoinLeftSemiLongOperator native: true - projectedOutputColumnNums: [0, 1, 2, 3] - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int), _col1 (type: string) - sort order: ++ - Reduce Sink Vectorization: - className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, 
hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0, _col1, _col5, _col6 + input vertices: + 1 Map 4 + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: int), _col1 (type: string), _col5 (type: int), _col6 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2, 3] - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE - value expressions: _col2 (type: int), _col3 (type: string) + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int), _col1 (type: string) + sort order: ++ + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + value expressions: _col2 (type: int), _col3 (type: string) Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -12645,15 +14805,9 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [string] Map 3 Map Operator Tree: TableScan @@ -12662,12 +14816,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator @@ -12676,10 +14828,8 @@ STAGE PLANS: Map-reduce partition columns: key (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) Execution mode: vectorized, llap @@ -12693,12 +14843,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: 
[0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 4 Map Operator Tree: TableScan @@ -12707,12 +14851,10 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -12721,16 +14863,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -12741,10 +14880,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -12757,27 +14894,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 4 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string, VALUE._col0:int, VALUE._col1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), VALUE._col0 (type: int), VALUE._col1 (type: string) @@ -12785,14 +14909,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1, 2, 3] - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -12830,10 +14953,10 @@ POSTHOOK: Input: default@t3_n35 10 val_10 10 val_5 4 val_4 4 val_2 8 val_8 8 val_4 -PREHOOK: query: explain vectorization detail +PREHOOK: 
query: explain vectorization operator select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -12861,12 +14984,10 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterExprAndExpr(children: SelectColumnIsNotNull(col 0:int), SelectColumnIsNotNull(col 1:string)) predicate: (key is not null and value is not null) (type: boolean) Statistics: Num rows: 20 Data size: 3760 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -12876,13 +14997,9 @@ STAGE PLANS: 0 key (type: int), value (type: string) 1 _col0 (type: int), _col1 (type: string) Map Join Vectorization: - bigTableKeyColumnNums: [0, 1] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0, 1] className: VectorMapJoinLeftSemiMultiKeyOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1] outputColumnNames: _col0, _col1 input vertices: 1 Map 3 @@ -12892,10 +15009,8 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -12908,12 +15023,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -12922,12 +15031,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterExprAndExpr(children: SelectColumnIsNotNull(col 0:int), SelectColumnIsNotNull(col 1:string)) predicate: (key is not null and value is not null) (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -12936,16 +15043,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int, col 1:string 
native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 @@ -12956,10 +15060,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -12972,27 +15074,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) @@ -13000,7 +15089,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -13043,10 +15131,10 @@ POSTHOOK: Input: default@t3_n35 5 val_5 8 val_8 9 val_9 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -13074,44 +15162,48 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 - Left Semi Join 0 to 2 keys: 0 key (type: int) 1 _col0 (type: int) - 2 _col0 (type: int) Map Join Vectorization: - bigTableKeyExpressions: col 0:int - bigTableValueExpressions: col 0:int - className: VectorMapJoinOperator - native: false - nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized 
Table and Supports Key Types IS true - nativeConditionsNotMet: One MapJoin Condition IS false + className: VectorMapJoinLeftSemiLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true outputColumnNames: _col0 input vertices: 1 Map 3 - 2 Map 4 - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Reduce Sink Vectorization: - className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 _col0 (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + className: VectorMapJoinLeftSemiLongOperator native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0 + input vertices: + 1 Map 4 + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -13120,15 +15212,9 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -13137,12 +15223,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -13151,16 +15235,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: 
Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -13171,10 +15252,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -13184,15 +15263,229 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Map 4 + Map Operator Tree: + TableScan + alias: c + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicate: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Group By Operator + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + 
Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n148 +PREHOOK: Input: default@t2_n87 +PREHOOK: Input: default@t3_n35 +#### A masked pattern was here #### +POSTHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n148 +POSTHOOK: Input: default@t2_n87 +POSTHOOK: Input: default@t3_n35 +#### A masked pattern was here #### +0 +0 +0 +0 +0 +0 +10 +10 +10 +10 +4 +4 +8 +8 +PREHOOK: query: explain vectorization operator +select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Map Join Operator + condition map: + Left Outer Join 0 to 1 + keys: + 0 key (type: int) + 1 key (type: int) + Map Join Vectorization: + className: VectorMapJoinOuterLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0, _col5 + input vertices: + 1 Map 3 + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicate: _col5 is not null (type: boolean) + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 _col5 (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + className: VectorMapJoinLeftSemiLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No 
nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0 + input vertices: + 1 Map 4 + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Map 3 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Reduce Output Operator + key expressions: key (type: int) + sort order: + + Map-reduce partition columns: key (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 4 Map Operator Tree: TableScan @@ -13201,12 +15494,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -13215,16 +15506,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -13235,10 +15523,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator 
- keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -13251,27 +15537,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -13279,14 +15552,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -13298,13 +15570,13 @@ STAGE PLANS: Processor Tree: ListSink -PREHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key +PREHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key PREHOOK: type: QUERY PREHOOK: Input: default@t1_n148 PREHOOK: Input: default@t2_n87 PREHOOK: Input: default@t3_n35 #### A masked pattern was here #### -POSTHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key +POSTHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n148 POSTHOOK: Input: default@t2_n87 @@ -13316,6 +15588,18 @@ POSTHOOK: Input: default@t3_n35 0 0 0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 10 10 10 @@ -13324,11 +15608,11 @@ POSTHOOK: Input: default@t3_n35 4 8 8 -PREHOOK: query: explain vectorization detail -select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key +PREHOOK: query: explain vectorization operator +select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail -select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on 
b.key = c.key sort by a.key +POSTHOOK: query: explain vectorization operator +select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: enabled: true @@ -13343,48 +15627,26 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE) - Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (BROADCAST_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: Map 1 Map Operator Tree: TableScan alias: a - Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Map Join Operator - condition map: - Left Outer Join 0 to 1 - Left Semi Join 1 to 2 - keys: - 0 key (type: int) - 1 key (type: int) - 2 _col0 (type: int) - Map Join Vectorization: - bigTableKeyExpressions: col 0:int - bigTableValueExpressions: col 0:int - className: VectorMapJoinOperator - native: false - nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true - nativeConditionsNotMet: One MapJoin Condition IS false - outputColumnNames: _col0 - input vertices: - 1 Map 3 - 2 Map 4 - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Reduce Sink Vectorization: - className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: key (type: int) + sort order: + + Map-reduce partition columns: key (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -13393,34 +15655,25 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] - Map 3 + Map 4 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan 
Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] - Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -13432,51 +15685,46 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] - Map 4 + Map 5 Map Operator Tree: TableScan alias: c + filterExpr: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Select Operator - expressions: key (type: int) - outputColumnNames: _col0 - Select Vectorization: - className: VectorSelectOperator + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - projectedOutputColumnNums: [0] + predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Group By Operator - Group By Vectorization: - className: VectorGroupByOperator - groupByMode: HASH - keyExpressions: col 0:int - native: false - vectorProcessingMode: HASH - projectedOutputColumnNums: [] - keys: _col0 (type: int) - mode: hash + Select Operator + expressions: key (type: int) outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Map-reduce partition columns: _col0 (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] + Group By Operator + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic 
stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -13488,27 +15736,45 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 key (type: int) + 1 key (type: int) + outputColumnNames: _col0, _col5 + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: _col5 is not null (type: boolean) + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 _col5 (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0 + input vertices: + 1 Map 5 + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false + Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -13516,14 +15782,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -13535,13 +15800,13 @@ STAGE PLANS: Processor Tree: ListSink -PREHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key +PREHOOK: query: select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key PREHOOK: type: QUERY PREHOOK: Input: default@t1_n148 PREHOOK: Input: default@t2_n87 PREHOOK: Input: default@t3_n35 #### A masked pattern was here #### -POSTHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key +POSTHOOK: query: select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n148 POSTHOOK: Input: default@t2_n87 @@ -13573,10 +15838,13 @@ POSTHOOK: Input: default@t3_n35 4 8 8 -PREHOOK: query: explain vectorization 
detail +NULL +NULL +NULL +PREHOOK: query: explain vectorization operator select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -13592,7 +15860,7 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (BROADCAST_EDGE) Reducer 3 <- Reducer 2 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: @@ -13603,17 +15871,14 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -13626,12 +15891,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 4 Map Operator Tree: TableScan @@ -13639,17 +15898,14 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -13662,51 +15918,46 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 5 Map Operator Tree: TableScan alias: c + filterExpr: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Select Operator - expressions: key (type: int) - outputColumnNames: _col0 - Select Vectorization: - className: VectorSelectOperator + Filter Operator + Filter Vectorization: + className: 
VectorFilterOperator native: true - projectedOutputColumnNums: [0] + predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Group By Operator - Group By Vectorization: - className: VectorGroupByOperator - groupByMode: HASH - keyExpressions: col 0:int - native: false - vectorProcessingMode: HASH - projectedOutputColumnNums: [] - keys: _col0 (type: int) - mode: hash + Select Operator + expressions: key (type: int) outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Map-reduce partition columns: _col0 (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] + Group By Operator + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -13718,44 +15969,45 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: llap Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 - Left Semi Join 1 to 2 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) - 2 _col0 (type: int) - outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + outputColumnNames: _col0, _col5 + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: _col5 is not null (type: boolean) + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 _col5 (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0 + input vertices: + 1 Map 5 + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + MergeJoin Vectorization: + enabled: 
false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -13763,14 +16015,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -13823,10 +16074,10 @@ POSTHOOK: Input: default@t3_n35 NULL NULL NULL -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -13842,29 +16093,60 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) - Reducer 3 <- Reducer 2 (SIMPLE_EDGE) + Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: Map 1 Map Operator Tree: TableScan alias: a + filterExpr: key is not null (type: boolean) Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Reduce Output Operator - key expressions: key (type: int) - sort order: + - Map-reduce partition columns: key (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] - Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE + predicate: key is not null (type: boolean) + Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 key (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + className: VectorMapJoinLeftSemiLongOperator + native: true + nativeConditionsMet: 
hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0 + input vertices: + 1 Map 3 + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Outer Join 0 to 1 + keys: + 0 _col0 (type: int) + 1 key (type: int) + Map Join Vectorization: + className: VectorMapJoinOuterLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0 + input vertices: + 1 Map 4 + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -13876,51 +16158,46 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] - Map 4 + Map 3 Map Operator Tree: TableScan alias: b + filterExpr: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Select Operator - expressions: key (type: int) - outputColumnNames: _col0 - Select Vectorization: - className: VectorSelectOperator + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - projectedOutputColumnNums: [0] + predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Group By Operator - Group By Vectorization: - className: VectorGroupByOperator - groupByMode: HASH - keyExpressions: col 0:int - native: false - vectorProcessingMode: HASH - projectedOutputColumnNums: [] - keys: _col0 (type: int) - mode: hash + Select Operator + expressions: key (type: int) outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Map-reduce partition columns: _col0 (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS 
true - valueColumnNums: [] + Group By Operator + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -13932,30 +16209,21 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] - Map 5 + Map 4 Map Operator Tree: TableScan alias: c Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -13968,44 +16236,14 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 - Execution mode: llap - Reduce Operator Tree: - Merge Join Operator - condition map: - Left Semi Join 0 to 1 - Left Outer Join 0 to 2 - keys: - 0 key (type: int) - 1 _col0 (type: int) - 2 key (type: int) - outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE - Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -14013,14 +16251,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column 
stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -14073,11 +16310,11 @@ POSTHOOK: Input: default@t3_n35 4 8 8 -PREHOOK: query: explain vectorization detail -select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key +PREHOOK: query: explain vectorization operator +select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail -select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key +POSTHOOK: query: explain vectorization operator +select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: enabled: true @@ -14092,7 +16329,8 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) + Map 1 <- Map 4 (BROADCAST_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) Reducer 3 <- Reducer 2 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: @@ -14100,21 +16338,39 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a + filterExpr: key is not null (type: boolean) Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Reduce Output Operator - key expressions: key (type: int) - sort order: + - Map-reduce partition columns: key (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] - Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE + predicate: key is not null (type: boolean) + Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 key (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + className: VectorMapJoinLeftSemiLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0 + input vertices: + 1 Map 4 + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 
(type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -14126,51 +16382,46 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 4 Map Operator Tree: TableScan alias: b + filterExpr: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Select Operator - expressions: key (type: int) - outputColumnNames: _col0 - Select Vectorization: - className: VectorSelectOperator + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - projectedOutputColumnNums: [0] + predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Group By Operator - Group By Vectorization: - className: VectorGroupByOperator - groupByMode: HASH - keyExpressions: col 0:int - native: false - vectorProcessingMode: HASH - projectedOutputColumnNums: [] - keys: _col0 (type: int) - mode: hash + Select Operator + expressions: key (type: int) outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Map-reduce partition columns: _col0 (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] + Group By Operator + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -14182,12 +16433,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - 
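
Note on the semi-join shape above: in these plans the LEFT SEMI JOIN small side (alias b) is first reduced to distinct keys by a hash-mode Group By Operator (keys: _col0, mode: hash) and only then shipped to the join, which is why every semi-join branch carries that Group By before its Reduce Output Operator. A minimal sketch of the equivalent rewrite, assuming the t1_n148/t2_n87/t3_n35 test tables created earlier in this q file:

-- LEFT SEMI JOIN only tests key existence, so it behaves like IN/EXISTS;
-- the planner deduplicates b.key (the hash GROUP BY above) before joining.
select a.key
from t3_n35 a
left semi join t1_n148 b on a.key = b.key;

-- Equivalent IN formulation; both return each matching a.key once per row
-- of a, never multiplied by duplicate keys in b.
select a.key
from t3_n35 a
where a.key in (select b.key from t1_n148 b);
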
scratchColumnTypeNames: [] Map 5 Map Operator Tree: TableScan @@ -14195,17 +16440,14 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -14218,44 +16460,32 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: llap Reduce Operator Tree: Merge Join Operator condition map: - Left Semi Join 0 to 1 - Right Outer Join 0 to 2 + Full Outer Join 0 to 1 keys: - 0 key (type: int) - 1 _col0 (type: int) - 2 key (type: int) + 0 _col0 (type: int) + 1 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -14263,14 +16493,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -14282,13 +16511,13 @@ STAGE PLANS: Processor Tree: ListSink -PREHOOK: query: select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key +PREHOOK: query: select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join 
t2_n87 c on a.key = c.key sort by a.key PREHOOK: type: QUERY PREHOOK: Input: default@t1_n148 PREHOOK: Input: default@t2_n87 PREHOOK: Input: default@t3_n35 #### A masked pattern was here #### -POSTHOOK: query: select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key +POSTHOOK: query: select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n148 POSTHOOK: Input: default@t2_n87 @@ -14316,19 +16545,30 @@ POSTHOOK: Input: default@t3_n35 10 10 10 +10 +10 +10 +10 +10 +10 +10 +10 +2 4 4 +5 +5 +5 8 8 +9 NULL NULL NULL -NULL -NULL -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -14344,7 +16584,8 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) + Map 1 <- Map 4 (BROADCAST_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) Reducer 3 <- Reducer 2 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: @@ -14352,21 +16593,39 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a + filterExpr: key is not null (type: boolean) Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Reduce Output Operator - key expressions: key (type: int) - sort order: + - Map-reduce partition columns: key (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] - Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE + predicate: key is not null (type: boolean) + Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 key (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + className: VectorMapJoinLeftSemiLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0 + input vertices: + 1 Map 4 + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: 
hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -14378,51 +16637,46 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 4 Map Operator Tree: TableScan alias: b + filterExpr: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Select Operator - expressions: key (type: int) - outputColumnNames: _col0 - Select Vectorization: - className: VectorSelectOperator + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - projectedOutputColumnNums: [0] + predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Group By Operator - Group By Vectorization: - className: VectorGroupByOperator - groupByMode: HASH - keyExpressions: col 0:int - native: false - vectorProcessingMode: HASH - projectedOutputColumnNums: [] - keys: _col0 (type: int) - mode: hash + Select Operator + expressions: key (type: int) outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Map-reduce partition columns: _col0 (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] + Group By Operator + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -14434,12 +16688,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 5 Map Operator Tree: TableScan @@ -14447,17 +16695,14 @@ STAGE PLANS: Statistics: Num rows: 
11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -14470,44 +16715,32 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: llap Reduce Operator Tree: Merge Join Operator condition map: - Left Semi Join 0 to 1 - Outer Join 0 to 2 + Full Outer Join 0 to 1 keys: - 0 key (type: int) - 1 _col0 (type: int) - 2 key (type: int) + 0 _col0 (type: int) + 1 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -14515,14 +16748,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -14588,10 +16820,10 @@ POSTHOOK: Input: default@t3_n35 NULL NULL NULL -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.key from t3_n35 a 
left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -14619,12 +16851,10 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 21 Data size: 3948 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -14634,13 +16864,9 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0, 1] className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1] outputColumnNames: _col0, _col1 input vertices: 1 Map 3 @@ -14652,13 +16878,9 @@ STAGE PLANS: 0 _col1 (type: string) 1 value (type: string) Map Join Vectorization: - bigTableKeyColumnNums: [1] - bigTableRetainedColumnNums: [0] - bigTableValueColumnNums: [0] className: VectorMapJoinOuterStringOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0] outputColumnNames: _col0 input vertices: 1 Map 4 @@ -14668,10 +16890,8 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 25 Data size: 4776 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -14684,12 +16904,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -14698,12 +16912,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -14712,16 +16924,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 11 Data size: 44 Basic stats: 
COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -14732,10 +16941,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -14748,12 +16955,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 4 Map Operator Tree: TableScan @@ -14761,17 +16962,14 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Reduce Output Operator key expressions: value (type: string) sort order: + Map-reduce partition columns: value (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -14784,27 +16982,14 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -14812,7 +16997,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 25 Data size: 4776 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -14878,10 +17062,10 @@ POSTHOOK: Input: default@t3_n35 4 8 8 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100 PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100 POSTHOOK: type: 
QUERY PLAN VECTORIZATION: @@ -14908,12 +17092,10 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterExprAndExpr(children: FilterLongColGreaterLongScalar(col 0:int, val 100), SelectColumnIsNotNull(col 1:string)) predicate: ((key > 100) and value is not null) (type: boolean) Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -14922,7 +17104,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: @@ -14931,13 +17112,9 @@ STAGE PLANS: 0 _col1 (type: string) 1 _col0 (type: string) Map Join Vectorization: - bigTableKeyColumnNums: [1] - bigTableRetainedColumnNums: [0] - bigTableValueColumnNums: [0] className: VectorMapJoinLeftSemiStringOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0] outputColumnNames: _col0 input vertices: 1 Map 2 @@ -14963,12 +17140,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 2 Map Operator Tree: TableScan @@ -14977,12 +17148,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 1:string) predicate: value is not null (type: boolean) Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -14991,16 +17160,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [1] Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 1:string native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: string) mode: hash outputColumnNames: _col0 @@ -15011,10 +17177,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -15027,12 +17191,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - 
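
Note on the Map Join Vectorization blocks above: each one lists the same nativeConditionsMet before a specialized operator such as VectorMapJoinLeftSemiLongOperator or VectorMapJoinLeftSemiStringOperator is chosen (Long vs String tracks the join-key type). A sketch of the settings those conditions check, using only the flags named in these plans; the values shown are what the conditions require, not new defaults:

set hive.execution.engine=tez;                              -- condition: engine in [tez, spark]
set hive.mapjoin.optimized.hashtable=true;                  -- condition: optimized hashtable
set hive.vectorized.execution.mapjoin.native.enabled=true;  -- condition: native mapjoin enabled
set hive.vectorized.execution.reducesink.new.enabled=true;  -- matching condition on the reduce-sink side

explain vectorization operator
select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100;
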
rowBatchContext: - dataColumnCount: 2 - includeColumns: [1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Stage: Stage-0 Fetch Operator @@ -15050,10 +17208,10 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t2_n87 POSTHOOK: Input: default@t3_n35 #### A masked pattern was here #### -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t2_n87 b on a.key=b.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t2_n87 b on a.key=b.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -15081,12 +17239,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -15096,26 +17252,21 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0, 1] className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1] outputColumnNames: _col0, _col1 input vertices: 1 Map 3 Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -15128,12 +17279,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -15142,12 +17287,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -15156,16 +17299,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: 
true - projectedOutputColumnNums: [0] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -15176,10 +17316,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -15192,27 +17330,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) @@ -15220,7 +17345,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -15255,10 +17379,10 @@ POSTHOOK: Input: default@t2_n87 10 val_10 4 val_4 8 val_8 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t2_n87 a left semi join t1_n148 b on b.key=a.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t2_n87 a left semi join t1_n148 b on b.key=a.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -15286,12 +17410,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -15301,26 +17423,21 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0, 1] className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin 
Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1] outputColumnNames: _col0, _col1 input vertices: 1 Map 3 Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -15333,12 +17450,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -15347,12 +17458,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -15361,16 +17470,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -15381,10 +17487,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -15397,27 +17501,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator 
Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) @@ -15425,7 +17516,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -15462,10 +17552,10 @@ POSTHOOK: Input: default@t2_n87 10 val_5 4 val_2 8 val_4 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t4_n19 b on b.key=a.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t4_n19 b on b.key=a.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -15493,12 +17583,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -15508,26 +17596,21 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0, 1] className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1] outputColumnNames: _col0, _col1 input vertices: 1 Map 3 Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -15540,12 +17623,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -15554,12 +17631,10 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Select 
Operator @@ -15568,16 +17643,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -15588,10 +17660,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -15604,27 +17674,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) @@ -15632,7 +17689,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -15661,10 +17717,10 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n148 POSTHOOK: Input: default@t4_n19 #### A masked pattern was here #### -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.value from t1_n148 a left semi join t3_n35 b on (b.key = a.key and b.key < '15') sort by a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -15692,12 +17748,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -15707,34 +17761,28 @@ STAGE PLANS: 0 key (type: int) 1 _col1 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [1] - bigTableValueColumnNums: [1] className: VectorMapJoinLeftSemiLongOperator 
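
Note on the baseline churn: every test in this file moves from explain vectorization detail to explain vectorization operator, which is why the removed lines consistently drop the per-column payload (vectorizationSchemaColumns, rowBatchContext, projectedOutputColumnNums, keyColumnNums/valueColumnNums, predicateExpression) while the operator class names and native-condition lists stay. A minimal sketch of the two levels on one of the queries above:

-- operator level: prints which Vector*Operator ran and whether it is native
explain vectorization operator
select * from t1_n148 a left semi join t4_n19 b on b.key = a.key sort by a.key, a.value;

-- detail level: additionally prints column mappings such as rowBatchContext
-- and projectedOutputColumnNums, which this change removes from the baselines
explain vectorization detail
select * from t1_n148 a left semi join t4_n19 b on b.key = a.key sort by a.key, a.value;
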
native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [1] outputColumnNames: _col1 input vertices: 1 Map 3 Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [1] Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -15747,12 +17795,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -15761,12 +17803,10 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterLongColLessLongScalar(col 0:int, val 15) predicate: (key < 15) (type: boolean) Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -15775,16 +17815,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int, col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col1 (type: int), _col1 (type: int) mode: hash outputColumnNames: _col0, _col1 @@ -15795,10 +17832,8 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -15811,27 +17846,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: 
vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) @@ -15839,7 +17861,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -15879,10 +17900,10 @@ val_5 val_5 val_8 val_9 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t2_n87 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t2_n87 b on a.key = b.key and b.value < "val_10" sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -15910,12 +17931,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -15925,26 +17944,21 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0, 1] className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1] outputColumnNames: _col0, _col1 input vertices: 1 Map 3 Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -15957,12 +17971,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -15971,12 +17979,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 
2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterExprAndExpr(children: FilterStringGroupColLessStringScalar(col 1:string, val val_10), SelectColumnIsNotNull(col 0:int)) predicate: ((value < 'val_10') and key is not null) (type: boolean) Statistics: Num rows: 3 Data size: 564 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -15985,16 +17991,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 3 Data size: 564 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int, col 1:string native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 @@ -16005,10 +18008,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 3 Data size: 564 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -16021,27 +18022,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) @@ -16049,7 +18037,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -16081,10 +18068,10 @@ POSTHOOK: Input: default@t2_n87 0 val_0 0 val_0 0 val_0 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.value from t1_n148 a left semi join (select key from t3_n35 where key > 5) b on a.key = b.key sort by a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -16112,12 +18099,10 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: 
[0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterLongColGreaterLongScalar(col 0:int, val 5) predicate: (key > 5) (type: boolean) Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -16126,16 +18111,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -16146,10 +18128,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -16162,12 +18142,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 2 Map Operator Tree: TableScan @@ -16176,12 +18150,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -16191,34 +18163,28 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [1] - bigTableValueColumnNums: [1] className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [1] outputColumnNames: _col1 input vertices: 1 Map 1 Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT 
columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -16231,27 +18197,14 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) @@ -16259,7 +18212,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -16291,10 +18243,10 @@ POSTHOOK: Input: default@t3_n35 val_10 val_8 val_9 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.value from t1_n148 a left semi join (select key , value from t2_n87 where key > 5) b on a.key = b.key and b.value <= 'val_20' sort by a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -16322,12 +18274,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterExprAndExpr(children: FilterLongColGreaterLongScalar(col 0:int, val 5), FilterStringGroupColLessEqualStringScalar(col 1:string, val val_20)) predicate: ((key > 5) and (value <= 'val_20')) (type: boolean) Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -16336,16 +18286,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int, col 1:string native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 @@ -16356,10 +18303,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - 
valueColumnNums: [] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -16372,12 +18317,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 2 Map Operator Tree: TableScan @@ -16386,12 +18325,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -16401,34 +18338,28 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [1] - bigTableValueColumnNums: [1] className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [1] outputColumnNames: _col1 input vertices: 1 Map 1 Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -16441,27 +18372,14 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) @@ -16469,7 +18387,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE 
Column stats: NONE File Output Operator compressed: false @@ -16498,10 +18415,10 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n148 POSTHOOK: Input: default@t2_n87 #### A masked pattern was here #### -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t2_n87 a left semi join (select key , value from t1_n148 where key > 2) b on a.key = b.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -16529,12 +18446,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterLongColGreaterLongScalar(col 0:int, val 2) predicate: (key > 2) (type: boolean) Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -16543,16 +18458,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -16563,10 +18475,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -16579,12 +18489,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 2 Map Operator Tree: TableScan @@ -16593,12 +18497,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -16608,26 +18510,21 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0, 1] className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin 
Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1] outputColumnNames: _col0, _col1 input vertices: 1 Map 1 Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -16640,27 +18537,14 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) @@ -16668,7 +18552,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -16702,10 +18585,10 @@ POSTHOOK: Input: default@t2_n87 10 val_5 4 val_2 8 val_4 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -16733,12 +18616,10 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -16748,26 +18629,21 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0] - bigTableValueColumnNums: [0] className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS 
true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0] outputColumnNames: _col0 input vertices: 1 Map 3 Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -16780,12 +18656,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -16794,12 +18664,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -16808,16 +18676,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -16828,10 +18693,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -16844,27 +18707,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -16872,7 
+18722,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -16920,10 +18769,10 @@ POSTHOOK: Input: default@t3_n35 8 8 9 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t1_n148 a left semi join t2_n87 b on a.key = 2*b.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -16951,12 +18800,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -16966,26 +18813,21 @@ STAGE PLANS: 0 key (type: int) 1 (2 * _col0) (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0, 1] className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1] outputColumnNames: _col0, _col1 input vertices: 1 Map 3 Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -16998,12 +18840,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -17012,12 +18848,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 3:int)(children: LongScalarMultiplyLongColumn(val 2, col 0:int) -> 3:int) predicate: (2 * key) is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -17026,16 +18860,13 @@ STAGE 
PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -17046,11 +18877,8 @@ STAGE PLANS: Map-reduce partition columns: (2 * _col0) (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [1] - keyExpressions: LongScalarMultiplyLongColumn(val 2, col 0:int) -> 1:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -17063,27 +18891,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [bigint] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) @@ -17091,7 +18906,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -17124,10 +18938,10 @@ POSTHOOK: Input: default@t2_n87 0 val_0 0 val_0 8 val_8 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t1_n148 a join t2_n87 b on a.key = b.key left semi join t3_n35 c on b.key = c.key sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -17155,53 +18969,58 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 - Left Semi Join 1 to 2 keys: 0 key (type: int) 1 key (type: int) - 2 _col0 (type: int) Map Join Vectorization: - bigTableKeyExpressions: col 0:int - bigTableValueExpressions: col 0:int, col 
1:string - className: VectorMapJoinOperator - native: false - nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - nativeConditionsNotMet: One MapJoin Condition IS false + className: VectorMapJoinInnerLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true outputColumnNames: _col0, _col1, _col5, _col6 input vertices: 1 Map 3 - 2 Map 4 - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: _col0 (type: int), _col1 (type: string), _col5 (type: int), _col6 (type: string) - outputColumnNames: _col0, _col1, _col2, _col3 - Select Vectorization: - className: VectorSelectOperator + Statistics: Num rows: 12 Data size: 2274 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 _col5 (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + className: VectorMapJoinLeftSemiLongOperator native: true - projectedOutputColumnNums: [0, 1, 2, 3] - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int), _col1 (type: string) - sort order: ++ - Reduce Sink Vectorization: - className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0, _col1, _col5, _col6 + input vertices: + 1 Map 4 + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Select Operator + expressions: _col0 (type: int), _col1 (type: string), _col5 (type: int), _col6 (type: string) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2, 3] - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE - value expressions: _col2 (type: int), _col3 (type: string) + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int), _col1 (type: string) + sort order: ++ + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + value 
expressions: _col2 (type: int), _col3 (type: string) Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -17210,15 +19029,9 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [string] Map 3 Map Operator Tree: TableScan @@ -17227,12 +19040,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator @@ -17241,10 +19052,8 @@ STAGE PLANS: Map-reduce partition columns: key (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) Execution mode: vectorized, llap @@ -17258,12 +19067,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 4 Map Operator Tree: TableScan @@ -17272,12 +19075,10 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -17286,16 +19087,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -17306,10 +19104,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE Execution mode: 
vectorized, llap LLAP IO: all inputs @@ -17322,27 +19118,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 4 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string, VALUE._col0:int, VALUE._col1:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), VALUE._col0 (type: int), VALUE._col1 (type: string) @@ -17350,14 +19133,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1, 2, 3] - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -17395,10 +19177,10 @@ POSTHOOK: Input: default@t3_n35 10 val_10 10 val_5 4 val_4 4 val_2 8 val_8 8 val_4 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value=b.value sort by a.key, a.value POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -17426,12 +19208,10 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterExprAndExpr(children: SelectColumnIsNotNull(col 0:int), SelectColumnIsNotNull(col 1:string)) predicate: (key is not null and value is not null) (type: boolean) Statistics: Num rows: 20 Data size: 3760 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -17441,26 +19221,21 @@ STAGE PLANS: 0 key (type: int), value (type: string) 1 _col0 (type: int), _col1 (type: string) Map Join Vectorization: - bigTableKeyColumnNums: [0, 1] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0, 1] className: VectorMapJoinLeftSemiMultiKeyOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1] 
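Judging by the className values in the plans around this hunk, the choice of semi join operator follows the join key shape: a single integer key vectorizes as VectorMapJoinLeftSemiLongOperator, while the composite (int, string) key in this plan vectorizes as VectorMapJoinLeftSemiMultiKeyOperator. Both queries below are taken verbatim from this file and contrast the two cases (shown here only to make the operator selection concrete):

-- single long key: plans as VectorMapJoinLeftSemiLongOperator
select /*+ mapjoin(b) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key sort by a.key;

-- composite (int, string) key: plans as VectorMapJoinLeftSemiMultiKeyOperator
select * from t3_n35 a left semi join t1_n148 b on a.key = b.key and a.value = b.value sort by a.key, a.value;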
outputColumnNames: _col0, _col1 input vertices: 1 Map 3 Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -17473,12 +19248,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -17487,12 +19256,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterExprAndExpr(children: SelectColumnIsNotNull(col 0:int), SelectColumnIsNotNull(col 1:string)) predicate: (key is not null and value is not null) (type: boolean) Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -17501,16 +19268,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int, col 1:string native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 @@ -17521,10 +19285,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int), _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 2068 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -17537,27 +19299,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:int, KEY.reducesinkkey1:string - partitionColumnCount: 0 - 
scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) @@ -17565,7 +19314,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -17608,10 +19356,10 @@ POSTHOOK: Input: default@t3_n35 5 val_5 8 val_8 9 val_9 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -17639,44 +19387,50 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 - Left Semi Join 0 to 2 keys: 0 key (type: int) 1 _col0 (type: int) - 2 _col0 (type: int) Map Join Vectorization: - bigTableKeyExpressions: col 0:int - bigTableValueExpressions: col 0:int - className: VectorMapJoinOperator - native: false - nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - nativeConditionsNotMet: One MapJoin Condition IS false + className: VectorMapJoinLeftSemiLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true outputColumnNames: _col0 input vertices: 1 Map 3 - 2 Map 4 - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Reduce Sink Vectorization: - className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 _col0 (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + className: VectorMapJoinLeftSemiLongOperator native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE + nativeConditionsMet: 
hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0 + input vertices: + 1 Map 4 + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -17685,15 +19439,9 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -17702,12 +19450,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -17716,16 +19462,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -17736,10 +19479,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -17749,15 +19490,231 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Map 4 + Map Operator Tree: + TableScan + alias: c + filterExpr: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Filter 
Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicate: key is not null (type: boolean) + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Group By Operator + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reducer 2 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: int) + outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key +PREHOOK: type: QUERY +PREHOOK: Input: default@t1_n148 +PREHOOK: Input: default@t2_n87 +PREHOOK: Input: default@t3_n35 +#### A masked pattern was here #### +POSTHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1_n148 +POSTHOOK: Input: default@t2_n87 +POSTHOOK: Input: default@t3_n35 +#### A masked pattern was here #### +0 +0 +0 +0 +0 +0 +10 +10 +10 +10 +4 +4 +8 +8 +PREHOOK: query: explain vectorization operator +select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = 
c.key sort by a.key +PREHOOK: type: QUERY +POSTHOOK: query: explain vectorization operator +select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key +POSTHOOK: type: QUERY +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez +#### A masked pattern was here #### + Edges: + Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Map Join Operator + condition map: + Left Outer Join 0 to 1 + keys: + 0 key (type: int) + 1 key (type: int) + Map Join Vectorization: + className: VectorMapJoinOuterLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0, _col5 + input vertices: + 1 Map 3 + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Filter Operator + Filter Vectorization: + className: VectorFilterOperator + native: true + predicate: _col5 is not null (type: boolean) + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 _col5 (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + className: VectorMapJoinLeftSemiLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0 + input vertices: + 1 Map 4 + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + Map 3 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Reduce Output Operator + key expressions: key (type: int) 
+ sort order: + + Map-reduce partition columns: key (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 4 Map Operator Tree: TableScan @@ -17766,12 +19723,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -17780,16 +19735,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -17800,10 +19752,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -17816,27 +19766,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -17844,14 +19781,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: 
[0] - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 46 Data size: 184 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -17863,13 +19799,13 @@ STAGE PLANS: Processor Tree: ListSink -PREHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key +PREHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key PREHOOK: type: QUERY PREHOOK: Input: default@t1_n148 PREHOOK: Input: default@t2_n87 PREHOOK: Input: default@t3_n35 #### A masked pattern was here #### -POSTHOOK: query: select /*+ mapjoin(b, c) */ a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key left semi join t2_n87 c on a.key = c.key sort by a.key +POSTHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n148 POSTHOOK: Input: default@t2_n87 @@ -17881,6 +19817,18 @@ POSTHOOK: Input: default@t3_n35 0 0 0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 10 10 10 @@ -17889,11 +19837,11 @@ POSTHOOK: Input: default@t3_n35 4 8 8 -PREHOOK: query: explain vectorization detail -select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key +PREHOOK: query: explain vectorization operator +select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail -select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key +POSTHOOK: query: explain vectorization operator +select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: enabled: true @@ -17908,48 +19856,26 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE) - Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (BROADCAST_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: Map 1 Map Operator Tree: TableScan alias: a - Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Map Join Operator - condition map: - Left Outer Join 0 to 1 - Left Semi Join 1 to 2 - keys: - 0 key (type: int) - 1 key (type: int) - 2 _col0 (type: int) - Map Join Vectorization: - bigTableKeyExpressions: col 0:int - bigTableValueExpressions: col 0:int - className: VectorMapJoinOperator - native: false - nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, 
hive.execution.engine tez IN [tez, spark] IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true - nativeConditionsNotMet: One MapJoin Condition IS false - outputColumnNames: _col0 - input vertices: - 1 Map 3 - 2 Map 4 - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Reduce Sink Vectorization: - className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: key (type: int) + sort order: + + Map-reduce partition columns: key (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -17958,34 +19884,25 @@ STAGE PLANS: inputFormatFeatureSupport: [DECIMAL_64] featureSupportInUse: [DECIMAL_64] inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] - Map 3 + Map 4 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] - Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -17997,51 +19914,46 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] - Map 4 + Map 5 Map Operator Tree: TableScan alias: c + filterExpr: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 
1:value:string, 2:ROW__ID:struct] - Select Operator - expressions: key (type: int) - outputColumnNames: _col0 - Select Vectorization: - className: VectorSelectOperator + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - projectedOutputColumnNums: [0] + predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Group By Operator - Group By Vectorization: - className: VectorGroupByOperator - groupByMode: HASH - keyExpressions: col 0:int - native: false - vectorProcessingMode: HASH - projectedOutputColumnNums: [] - keys: _col0 (type: int) - mode: hash + Select Operator + expressions: key (type: int) outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Map-reduce partition columns: _col0 (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] + Group By Operator + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -18053,27 +19965,46 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 + Execution mode: llap + Reduce Operator Tree: + Merge Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 key (type: int) + 1 key (type: int) + outputColumnNames: _col0, _col5 + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: _col5 is not null (type: boolean) + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 _col5 (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0 + input vertices: + 1 Map 5 + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false + 
Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -18081,14 +20012,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -18100,13 +20030,13 @@ STAGE PLANS: Processor Tree: ListSink -PREHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key +PREHOOK: query: select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key PREHOOK: type: QUERY PREHOOK: Input: default@t1_n148 PREHOOK: Input: default@t2_n87 PREHOOK: Input: default@t3_n35 #### A masked pattern was here #### -POSTHOOK: query: select a.key from t3_n35 a left outer join t1_n148 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key +POSTHOOK: query: select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n148 POSTHOOK: Input: default@t2_n87 @@ -18138,10 +20068,13 @@ POSTHOOK: Input: default@t3_n35 4 8 8 -PREHOOK: query: explain vectorization detail +NULL +NULL +NULL +PREHOOK: query: explain vectorization operator select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.key from t1_n148 a full outer join t3_n35 b on a.key = b.key left semi join t2_n87 c on b.key = c.key sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -18157,7 +20090,7 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (BROADCAST_EDGE) Reducer 3 <- Reducer 2 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: @@ -18168,17 +20101,14 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: 
hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -18191,12 +20121,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 4 Map Operator Tree: TableScan @@ -18204,17 +20128,14 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -18227,51 +20148,46 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 5 Map Operator Tree: TableScan alias: c + filterExpr: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Select Operator - expressions: key (type: int) - outputColumnNames: _col0 - Select Vectorization: - className: VectorSelectOperator + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - projectedOutputColumnNums: [0] + predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Group By Operator - Group By Vectorization: - className: VectorGroupByOperator - groupByMode: HASH - keyExpressions: col 0:int - native: false - vectorProcessingMode: HASH - projectedOutputColumnNums: [] - keys: _col0 (type: int) - mode: hash + Select Operator + expressions: key (type: int) outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Map-reduce partition columns: _col0 (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] + Group By Operator + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + 
vectorProcessingMode: HASH + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -18283,44 +20199,46 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: llap Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 - Left Semi Join 1 to 2 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) - 2 _col0 (type: int) - outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + outputColumnNames: _col0, _col5 + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: _col5 is not null (type: boolean) + Statistics: Num rows: 24 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 _col5 (type: int) + 1 _col0 (type: int) + outputColumnNames: _col0 + input vertices: + 1 Map 5 + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -18328,14 +20246,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 105 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output 
format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -18388,10 +20305,10 @@ POSTHOOK: Input: default@t3_n35 NULL NULL NULL -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -18407,29 +20324,62 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) - Reducer 3 <- Reducer 2 (SIMPLE_EDGE) + Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: Map 1 Map Operator Tree: TableScan alias: a + filterExpr: key is not null (type: boolean) Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Reduce Output Operator - key expressions: key (type: int) - sort order: + - Map-reduce partition columns: key (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] - Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE + predicate: key is not null (type: boolean) + Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 key (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + className: VectorMapJoinLeftSemiLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0 + input vertices: + 1 Map 3 + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Map Join Operator + condition map: + Left Outer Join 0 to 1 + keys: + 0 _col0 (type: int) + 1 key (type: int) + Map Join Vectorization: + className: VectorMapJoinOuterLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0 + input vertices: + 1 Map 4 + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Reduce Sink Vectorization: + className: 
VectorReduceSinkObjectHashOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -18441,51 +20391,46 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] - Map 4 + Map 3 Map Operator Tree: TableScan alias: b + filterExpr: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Select Operator - expressions: key (type: int) - outputColumnNames: _col0 - Select Vectorization: - className: VectorSelectOperator + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - projectedOutputColumnNums: [0] + predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Group By Operator - Group By Vectorization: - className: VectorGroupByOperator - groupByMode: HASH - keyExpressions: col 0:int - native: false - vectorProcessingMode: HASH - projectedOutputColumnNums: [] - keys: _col0 (type: int) - mode: hash + Select Operator + expressions: key (type: int) outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Map-reduce partition columns: _col0 (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] + Group By Operator + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -18497,30 +20442,21 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] - Map 5 + 
Map 4 Map Operator Tree: TableScan alias: c Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -18533,44 +20469,14 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 - Execution mode: llap - Reduce Operator Tree: - Merge Join Operator - condition map: - Left Semi Join 0 to 1 - Left Outer Join 0 to 2 - keys: - 0 key (type: int) - 1 _col0 (type: int) - 2 key (type: int) - outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE - Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -18578,14 +20484,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -18638,11 +20543,11 @@ POSTHOOK: Input: default@t3_n35 4 8 8 -PREHOOK: query: explain vectorization detail -select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key +PREHOOK: query: explain vectorization operator +select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail -select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key +POSTHOOK: query: explain vectorization operator +select a.key from t3_n35 a left semi 
join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: enabled: true @@ -18657,7 +20562,8 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) + Map 1 <- Map 4 (BROADCAST_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) Reducer 3 <- Reducer 2 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: @@ -18665,21 +20571,40 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a + filterExpr: key is not null (type: boolean) Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Reduce Output Operator - key expressions: key (type: int) - sort order: + - Map-reduce partition columns: key (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] - Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE + predicate: key is not null (type: boolean) + Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 key (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + className: VectorMapJoinLeftSemiLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0 + input vertices: + 1 Map 4 + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -18691,51 +20616,46 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 4 Map Operator Tree: TableScan alias: b + filterExpr: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Select Operator - expressions: key (type: int) - outputColumnNames: _col0 - Select Vectorization: - className: VectorSelectOperator + 
Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - projectedOutputColumnNums: [0] + predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Group By Operator - Group By Vectorization: - className: VectorGroupByOperator - groupByMode: HASH - keyExpressions: col 0:int - native: false - vectorProcessingMode: HASH - projectedOutputColumnNums: [] - keys: _col0 (type: int) - mode: hash + Select Operator + expressions: key (type: int) outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Map-reduce partition columns: _col0 (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] + Group By Operator + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -18747,12 +20667,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 5 Map Operator Tree: TableScan @@ -18760,17 +20674,14 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -18783,44 +20694,32 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 
Execution mode: llap Reduce Operator Tree: Merge Join Operator condition map: - Left Semi Join 0 to 1 - Right Outer Join 0 to 2 + Full Outer Join 0 to 1 keys: - 0 key (type: int) - 1 _col0 (type: int) - 2 key (type: int) + 0 _col0 (type: int) + 1 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -18828,14 +20727,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -18847,13 +20745,13 @@ STAGE PLANS: Processor Tree: ListSink -PREHOOK: query: select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key +PREHOOK: query: select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key PREHOOK: type: QUERY PREHOOK: Input: default@t1_n148 PREHOOK: Input: default@t2_n87 PREHOOK: Input: default@t3_n35 #### A masked pattern was here #### -POSTHOOK: query: select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key right outer join t1_n148 c on a.key = c.key sort by a.key +POSTHOOK: query: select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n148 POSTHOOK: Input: default@t2_n87 @@ -18881,19 +20779,30 @@ POSTHOOK: Input: default@t3_n35 10 10 10 +10 +10 +10 +10 +10 +10 +10 +10 +2 4 4 +5 +5 +5 8 8 +9 NULL NULL NULL -NULL -NULL -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t1_n148 b on a.key = b.key full outer join t2_n87 c on a.key = c.key sort by a.key POSTHOOK: 
type: QUERY PLAN VECTORIZATION: @@ -18909,7 +20818,8 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) + Map 1 <- Map 4 (BROADCAST_EDGE) + Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 5 (SIMPLE_EDGE) Reducer 3 <- Reducer 2 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: @@ -18917,21 +20827,40 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a + filterExpr: key is not null (type: boolean) Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Reduce Output Operator - key expressions: key (type: int) - sort order: + - Map-reduce partition columns: key (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] - Statistics: Num rows: 22 Data size: 88 Basic stats: COMPLETE Column stats: NONE + predicate: key is not null (type: boolean) + Statistics: Num rows: 21 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Semi Join 0 to 1 + keys: + 0 key (type: int) + 1 _col0 (type: int) + Map Join Vectorization: + className: VectorMapJoinLeftSemiLongOperator + native: true + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + outputColumnNames: _col0 + input vertices: + 1 Map 4 + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 23 Data size: 92 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -18943,51 +20872,46 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 4 Map Operator Tree: TableScan alias: b + filterExpr: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] - Select Operator - expressions: key (type: int) - outputColumnNames: _col0 - Select Vectorization: - className: VectorSelectOperator + Filter Operator + Filter Vectorization: + className: VectorFilterOperator native: true - 
projectedOutputColumnNums: [0] + predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Group By Operator - Group By Vectorization: - className: VectorGroupByOperator - groupByMode: HASH - keyExpressions: col 0:int - native: false - vectorProcessingMode: HASH - projectedOutputColumnNums: [] - keys: _col0 (type: int) - mode: hash + Select Operator + expressions: key (type: int) outputColumnNames: _col0 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: int) - sort order: + - Map-reduce partition columns: _col0 (type: int) - Reduce Sink Vectorization: - className: VectorReduceSinkLongOperator - keyColumnNums: [0] - native: true - nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] + Group By Operator + Group By Vectorization: + className: VectorGroupByOperator + groupByMode: HASH + native: false + vectorProcessingMode: HASH + keys: _col0 (type: int) + mode: hash + outputColumnNames: _col0 Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs Map Vectorization: @@ -18999,12 +20923,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 5 Map Operator Tree: TableScan @@ -19012,17 +20930,14 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -19035,44 +20950,32 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: llap Reduce Operator Tree: Merge Join Operator condition map: - Left 
Semi Join 0 to 1 - Outer Join 0 to 2 + Full Outer Join 0 to 1 keys: - 0 key (type: int) - 1 _col0 (type: int) - 2 key (type: int) + 0 _col0 (type: int) + 1 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -19080,14 +20983,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 48 Data size: 193 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 101 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -19153,10 +21055,10 @@ POSTHOOK: Input: default@t3_n35 NULL NULL NULL -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t2_n87 b on a.key = b.key left outer join t1_n148 c on a.value = c.value sort by a.key POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -19184,12 +21086,10 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 21 Data size: 3948 Basic stats: COMPLETE Column stats: NONE Map Join Operator @@ -19199,17 +21099,14 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0, 1] className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, 
Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1] outputColumnNames: _col0, _col1 input vertices: 1 Map 3 Statistics: Num rows: 23 Data size: 4342 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Map Join Operator condition map: Left Outer Join 0 to 1 @@ -19217,26 +21114,21 @@ STAGE PLANS: 0 _col1 (type: string) 1 value (type: string) Map Join Vectorization: - bigTableKeyColumnNums: [1] - bigTableRetainedColumnNums: [0] - bigTableValueColumnNums: [0] className: VectorMapJoinOuterStringOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0] outputColumnNames: _col0 input vertices: 1 Map 4 Statistics: Num rows: 25 Data size: 4776 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int) sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 25 Data size: 4776 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -19249,12 +21141,6 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 3 Map Operator Tree: TableScan @@ -19263,12 +21149,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 0:int) predicate: key is not null (type: boolean) Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -19277,16 +21161,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 0:int native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: int) mode: hash outputColumnNames: _col0 @@ -19297,10 +21178,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 44 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, 
llap LLAP IO: all inputs @@ -19313,12 +21192,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 4 Map Operator Tree: TableScan @@ -19326,17 +21199,14 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Reduce Output Operator key expressions: value (type: string) sort order: + Map-reduce partition columns: value (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -19349,27 +21219,14 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:int - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) @@ -19377,7 +21234,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0] Statistics: Num rows: 25 Data size: 4776 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -19443,10 +21299,10 @@ POSTHOOK: Input: default@t3_n35 4 8 8 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100 PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain vectorization operator select a.key from t3_n35 a left semi join t2_n87 b on a.value = b.value where a.key > 100 POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -19473,12 +21329,10 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 4136 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: FilterExprAndExpr(children: FilterLongColGreaterLongScalar(col 0:int, val 100), SelectColumnIsNotNull(col 1:string)) predicate: ((key > 100) and value is not null) (type: boolean) Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -19487,7 +21341,6 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 1] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column 
stats: NONE Map Join Operator condition map: @@ -19496,17 +21349,14 @@ STAGE PLANS: 0 _col1 (type: string) 1 _col0 (type: string) Map Join Vectorization: - bigTableKeyColumnNums: [1] - bigTableRetainedColumnNums: [0] - bigTableValueColumnNums: [0] className: VectorMapJoinLeftSemiStringOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0] outputColumnNames: _col0 input vertices: 1 Map 2 Statistics: Num rows: 12 Data size: 2226 Basic stats: COMPLETE Column stats: NONE + HybridGraceHashJoin: true File Output Operator compressed: false File Sink Vectorization: @@ -19528,12 +21378,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Map 2 Map Operator Tree: TableScan @@ -19542,12 +21386,10 @@ STAGE PLANS: Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:key:int, 1:value:string, 2:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator native: true - predicateExpression: SelectColumnIsNotNull(col 1:string) predicate: value is not null (type: boolean) Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -19556,16 +21398,13 @@ STAGE PLANS: Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [1] Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE Group By Operator Group By Vectorization: className: VectorGroupByOperator groupByMode: HASH - keyExpressions: col 1:string native: false vectorProcessingMode: HASH - projectedOutputColumnNums: [] keys: _col0 (type: string) mode: hash outputColumnNames: _col0 @@ -19576,10 +21415,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 11 Data size: 2024 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -19592,12 +21429,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [1] - dataColumns: key:int, value:string - partitionColumnCount: 0 - scratchColumnTypeNames: [] Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/llap/vector_like_2.q.out ql/src/test/results/clientpositive/llap/vector_like_2.q.out index 849d6c69fc..31b7326f4b 100644 --- ql/src/test/results/clientpositive/llap/vector_like_2.q.out +++ ql/src/test/results/clientpositive/llap/vector_like_2.q.out @@ -63,10 +63,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: 
hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumns: 2:boolean Statistics: Num rows: 3 Data size: 552 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: boolean) Execution mode: vectorized, llap @@ -91,7 +91,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -135,9 +135,9 @@ POSTHOOK: query: select a, a like "%bar" from foo order by a POSTHOOK: type: QUERY POSTHOOK: Input: default@foo #### A masked pattern was here #### -NULL NULL some bar true some foo false +NULL NULL PREHOOK: query: select a, a like "%bar" from foo order by a PREHOOK: type: QUERY PREHOOK: Input: default@foo @@ -146,6 +146,6 @@ POSTHOOK: query: select a, a like "%bar" from foo order by a POSTHOOK: type: QUERY POSTHOOK: Input: default@foo #### A masked pattern was here #### -NULL NULL some bar true some foo false +NULL NULL diff --git ql/src/test/results/clientpositive/llap/vector_llap_io_data_conversion.q.out ql/src/test/results/clientpositive/llap/vector_llap_io_data_conversion.q.out index f503761c4d..8ac3a11cbb 100644 --- ql/src/test/results/clientpositive/llap/vector_llap_io_data_conversion.q.out +++ ql/src/test/results/clientpositive/llap/vector_llap_io_data_conversion.q.out @@ -104,10 +104,9 @@ STAGE PLANS: sort order: +++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:varchar(10), 1:int, 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 10 Data size: 2820 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: all inputs @@ -131,7 +130,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aaa + reduceColumnNullOrder: zzz reduceColumnSortOrder: +++ allNative: false usesVectorUDFAdaptor: false diff --git ql/src/test/results/clientpositive/llap/vector_llap_text_1.q.out ql/src/test/results/clientpositive/llap/vector_llap_text_1.q.out index 79d7746c15..29c4bc1b1c 100644 --- ql/src/test/results/clientpositive/llap/vector_llap_text_1.q.out +++ ql/src/test/results/clientpositive/llap/vector_llap_text_1.q.out @@ -167,11 +167,11 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [1] + partitionColumns: 0:int + valueColumns: 1:string Statistics: Num rows: 242 Data size: 22990 Basic stats: COMPLETE Column stats: 
COMPLETE value expressions: _col1 (type: string) Execution mode: vectorized, llap @@ -223,14 +223,16 @@ STAGE PLANS: 0 _col0 (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [1] + bigTableKeyColumns: 0:int + bigTableRetainColumnNums: [1] + bigTableValueColumns: 1:string className: VectorMapJoinInnerLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 4, 1] - smallTableMapping: [4] + nonOuterSmallTableKeyMapping: [0] + projectedOutput: 0:int, 4:string, 1:string + smallTableValueMapping: 4:string + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col3 input vertices: 0 Map 1 @@ -248,10 +250,9 @@ STAGE PLANS: sort order: +++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 4, 1] + keyColumns: 0:int, 4:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 399 Data size: 74214 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap @@ -277,7 +278,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aaa + reduceColumnNullOrder: zzz reduceColumnSortOrder: +++ allNative: false usesVectorUDFAdaptor: false diff --git ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out index 94733cff67..3364035276 100644 --- ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out +++ ql/src/test/results/clientpositive/llap/vector_mapjoin_reduce.q.out @@ -203,6 +203,7 @@ STAGE PLANS: className: VectorMapJoinInnerLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col3 input vertices: 1 Map 3 @@ -217,6 +218,7 @@ STAGE PLANS: className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col3 input vertices: 1 Map 4 @@ -477,6 +479,7 @@ STAGE PLANS: className: VectorMapJoinInnerLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition 
IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col3, _col4 input vertices: 1 Map 3 @@ -491,6 +494,7 @@ STAGE PLANS: className: VectorMapJoinLeftSemiMultiKeyOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col3 input vertices: 1 Map 4 diff --git ql/src/test/results/clientpositive/llap/vector_mr_diff_schema_alias.q.out ql/src/test/results/clientpositive/llap/vector_mr_diff_schema_alias.q.out index e81d7dfc38..58c00b6819 100644 --- ql/src/test/results/clientpositive/llap/vector_mr_diff_schema_alias.q.out +++ ql/src/test/results/clientpositive/llap/vector_mr_diff_schema_alias.q.out @@ -354,6 +354,9 @@ STAGE PLANS: sort order: + Map-reduce partition columns: _col1 (type: int) Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: llap Reduce Operator Tree: @@ -383,6 +386,9 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col1 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 4 Execution mode: vectorized, llap Reduce Vectorization: diff --git ql/src/test/results/clientpositive/llap/vector_nullsafe_join.q.out ql/src/test/results/clientpositive/llap/vector_nullsafe_join.q.out index 473514f047..78d1e38c28 100644 --- ql/src/test/results/clientpositive/llap/vector_nullsafe_join.q.out +++ ql/src/test/results/clientpositive/llap/vector_nullsafe_join.q.out @@ -846,26 +846,141 @@ NULL 35 NULL NULL NULL NULL 10 NULL NULL NULL 48 NULL NULL NULL NULL NULL -PREHOOK: query: SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key<=>b.value +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL DEBUG +SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key<=>b.value PREHOOK: type: QUERY -PREHOOK: Input: default@myinput1 -#### A masked pattern was here #### -POSTHOOK: query: SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key<=>b.value +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL DEBUG +SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key<=>b.value POSTHOOK: type: QUERY -POSTHOOK: Input: default@myinput1 +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez #### A masked pattern was here #### -10 NULL NULL 10 -100 100 100 100 -NULL 10 10 NULL -NULL 10 48 NULL -NULL 10 NULL NULL -NULL 35 10 NULL -NULL 35 48 NULL -NULL 35 NULL NULL -NULL NULL 10 NULL -NULL NULL 48 NULL -NULL NULL NULL 35 -NULL NULL NULL NULL + Edges: + Map 2 <- Map 1 (BROADCAST_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 6 Data size: 48 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:int, 
1:value:int, 2:ROW__ID:struct] + Reduce Output Operator + key expressions: key (type: int) + sort order: + + output key column names: KEY.reducesinkkey0 + output value column names: VALUE._col0 + Map-reduce partition columns: key (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:int + Statistics: Num rows: 6 Data size: 48 Basic stats: COMPLETE Column stats: NONE + value expressions: value (type: int) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:int, value:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 2 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 6 Data size: 48 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:int, 1:value:int, 2:ROW__ID:struct] + Map Join Operator + condition map: + Right Outer Join 0 to 1 + keyContext: [types [int], serde=org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe, hasFilter=false] + outer filter mappings: [null, [0, 0]] + valueContexts: [0:[types [int], serde=org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe, hasFilter=false]] + keyExpressions: + 0 [Column[key]] + 1 [Column[value]] + keys: + 0 key (type: int) + 1 value (type: int) + Map Join Vectorization: + bigTableKeyExpressions: col 1:int + bigTableValueExpressions: col 0:int, col 1:int + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.vectorized.execution.mapjoin.native.enabled IS false, No nullsafe IS false + nullSafes: [true] + outputColumnNames: _col0, _col1, _col5, _col6 + input vertices: + 0 Map 1 + Statistics: Num rows: 6 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3] + Statistics: Num rows: 6 Data size: 52 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 6 Data size: 52 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: 
hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:int, value:int + partitionColumnCount: 0 + scratchColumnTypeNames: [bigint, bigint] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + PREHOOK: query: SELECT * FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.key<=>b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 @@ -1745,26 +1860,141 @@ NULL 35 NULL NULL NULL NULL 10 NULL NULL NULL 48 NULL NULL NULL NULL NULL -PREHOOK: query: SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key<=>b.value +PREHOOK: query: EXPLAIN VECTORIZATION DETAIL DEBUG +SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key<=>b.value PREHOOK: type: QUERY -PREHOOK: Input: default@myinput1 -#### A masked pattern was here #### -POSTHOOK: query: SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key<=>b.value +POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL DEBUG +SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ON a.key<=>b.value POSTHOOK: type: QUERY -POSTHOOK: Input: default@myinput1 +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez #### A masked pattern was here #### -10 NULL NULL 10 -100 100 100 100 -NULL 10 10 NULL -NULL 10 48 NULL -NULL 10 NULL NULL -NULL 35 10 NULL -NULL 35 48 NULL -NULL 35 NULL NULL -NULL NULL 10 NULL -NULL NULL 48 NULL -NULL NULL NULL 35 -NULL NULL NULL NULL + Edges: + Map 2 <- Map 1 (BROADCAST_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 6 Data size: 48 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + vectorizationSchemaColumns: [0:key:int, 1:value:int, 2:ROW__ID:struct] + Reduce Output Operator + key expressions: key (type: int) + sort order: + + output key column names: KEY.reducesinkkey0 + output value column names: VALUE._col0 + Map-reduce partition columns: key (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + keyColumns: 0:int + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + valueColumns: 1:int + Statistics: Num rows: 6 Data size: 48 Basic stats: COMPLETE Column stats: NONE + value expressions: value (type: int) + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:int, value:int + partitionColumnCount: 0 + scratchColumnTypeNames: [] + Map 2 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 6 Data size: 48 Basic stats: COMPLETE Column stats: NONE + TableScan 
Vectorization: + native: true + vectorizationSchemaColumns: [0:key:int, 1:value:int, 2:ROW__ID:struct] + Map Join Operator + condition map: + Right Outer Join 0 to 1 + keyContext: [types [int], serde=org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe, hasFilter=false] + outer filter mappings: [null, [0, 0]] + valueContexts: [0:[types [int], serde=org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe, hasFilter=false]] + keyExpressions: + 0 [Column[key]] + 1 [Column[value]] + keys: + 0 key (type: int) + 1 value (type: int) + Map Join Vectorization: + bigTableKeyExpressions: col 1:int + bigTableValueExpressions: col 0:int, col 1:int + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: No nullsafe IS false + nullSafes: [true] + outputColumnNames: _col0, _col1, _col5, _col6 + input vertices: + 0 Map 1 + Statistics: Num rows: 6 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [0, 1, 2, 3] + Statistics: Num rows: 6 Data size: 52 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 6 Data size: 52 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized, llap + LLAP IO: all inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vectorized.input.format IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + rowBatchContext: + dataColumnCount: 2 + includeColumns: [0, 1] + dataColumns: key:int, value:int + partitionColumnCount: 0 + scratchColumnTypeNames: [bigint, bigint] + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + PREHOOK: query: SELECT * FROM myinput1 a FULL OUTER JOIN myinput1 b ON a.key<=>b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1 diff --git ql/src/test/results/clientpositive/llap/vector_orc_nested_column_pruning.q.out ql/src/test/results/clientpositive/llap/vector_orc_nested_column_pruning.q.out index 7bc7fb2d6e..5e7e28d3b9 100644 --- ql/src/test/results/clientpositive/llap/vector_orc_nested_column_pruning.q.out +++ ql/src/test/results/clientpositive/llap/vector_orc_nested_column_pruning.q.out @@ -1492,6 +1492,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Stage: Stage-0 Fetch Operator @@ -1638,6 +1641,9 @@ STAGE 
PLANS: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Stage: Stage-0 Fetch Operator @@ -1811,6 +1817,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Stage: Stage-0 Fetch Operator @@ -1949,6 +1958,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Stage: Stage-0 Fetch Operator @@ -2110,6 +2122,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/llap/vector_order_null.q.out ql/src/test/results/clientpositive/llap/vector_order_null.q.out index cb4053ee5a..cc05a8dd0f 100644 --- ql/src/test/results/clientpositive/llap/vector_order_null.q.out +++ ql/src/test/results/clientpositive/llap/vector_order_null.q.out @@ -116,10 +116,9 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:int, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6 Data size: 364 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: no inputs @@ -238,10 +237,9 @@ STAGE PLANS: sort order: -+ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:int, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6 Data size: 364 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: no inputs @@ -360,10 +358,9 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 0] + keyColumns: 1:string, 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6 Data size: 364 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: 
no inputs @@ -482,10 +479,9 @@ STAGE PLANS: sort order: -+ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 0] + keyColumns: 1:string, 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6 Data size: 364 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: no inputs @@ -604,10 +600,9 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:int, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6 Data size: 364 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: no inputs @@ -726,10 +721,9 @@ STAGE PLANS: sort order: -+ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:int, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6 Data size: 364 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: no inputs @@ -848,10 +842,9 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 0] + keyColumns: 1:string, 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6 Data size: 364 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: no inputs @@ -875,7 +868,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: za + reduceColumnNullOrder: zz reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -923,9 +916,9 @@ x.a x.b 1 A 2 A 2 B -NULL NULL 2 NULL 3 NULL +NULL NULL PREHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT x.* FROM src_null_n3 x ORDER BY b desc nulls last, a PREHOOK: type: QUERY @@ -970,10 +963,9 @@ STAGE PLANS: sort order: -+ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 0] + keyColumns: 1:string, 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6 Data size: 364 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: no inputs @@ -997,7 +989,7 @@ STAGE 
PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: za + reduceColumnNullOrder: zz reduceColumnSortOrder: -+ allNative: false usesVectorUDFAdaptor: false @@ -1045,9 +1037,9 @@ x.a x.b 2 B 1 A 2 A -NULL NULL 2 NULL 3 NULL +NULL NULL PREHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT x.* FROM src_null_n3 x ORDER BY a asc nulls last, b desc PREHOOK: type: QUERY @@ -1092,10 +1084,9 @@ STAGE PLANS: sort order: +- Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:int, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6 Data size: 364 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: no inputs @@ -1214,10 +1205,9 @@ STAGE PLANS: sort order: -- Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 0] + keyColumns: 1:string, 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6 Data size: 364 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: no inputs @@ -1336,10 +1326,9 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 0] + keyColumns: 1:string, 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6 Data size: 364 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: no inputs diff --git ql/src/test/results/clientpositive/llap/vector_outer_join0.q.out ql/src/test/results/clientpositive/llap/vector_outer_join0.q.out index aa0a46fa3e..70fd6972f5 100644 --- ql/src/test/results/clientpositive/llap/vector_outer_join0.q.out +++ ql/src/test/results/clientpositive/llap/vector_outer_join0.q.out @@ -107,15 +107,16 @@ STAGE PLANS: 0 _col1 (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [1] - bigTableOuterKeyMapping: 1 -> 3 - bigTableRetainedColumnNums: [0, 1, 3] - bigTableValueColumnNums: [0, 1] + bigTableKeyColumns: 1:int + bigTableRetainColumnNums: [0, 1] + bigTableValueColumns: 0:string, 1:int className: VectorMapJoinOuterLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1, 3, 4] - smallTableMapping: [4] + outerSmallTableKeyMapping: 1 -> 3 + projectedOutput: 0:string, 1:int, 3:int, 4:string + smallTableValueMapping: 4:string + 
hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col2, _col3 input vertices: 1 Map 2 @@ -169,10 +170,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:string Statistics: Num rows: 6 Data size: 554 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: string) Execution mode: vectorized, llap @@ -259,10 +260,10 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [1] + keyColumns: 1:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:string Statistics: Num rows: 6 Data size: 554 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: string) Execution mode: vectorized, llap @@ -305,15 +306,16 @@ STAGE PLANS: 0 _col1 (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableOuterKeyMapping: 0 -> 4 - bigTableRetainedColumnNums: [0, 1, 4] - bigTableValueColumnNums: [0, 1] + bigTableKeyColumns: 0:int + bigTableRetainColumnNums: [0, 1] + bigTableValueColumns: 0:int, 1:string className: VectorMapJoinOuterLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [3, 4, 0, 1] - smallTableMapping: [3] + outerSmallTableKeyMapping: 0 -> 4 + projectedOutput: 3:string, 4:int, 0:int, 1:string + smallTableValueMapping: 3:string + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col2, _col3 input vertices: 0 Map 1 diff --git ql/src/test/results/clientpositive/llap/vector_outer_join1.q.out ql/src/test/results/clientpositive/llap/vector_outer_join1.q.out index c74a588993..3caa9793fb 100644 --- ql/src/test/results/clientpositive/llap/vector_outer_join1.q.out +++ ql/src/test/results/clientpositive/llap/vector_outer_join1.q.out @@ -120,11 +120,11 @@ POSTHOOK: query: select * from small_alltypesorc3a POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc3a #### A masked pattern was here #### -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +NULL 
-16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false +NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false +NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false +NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true +NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true PREHOOK: query: select * from small_alltypesorc4a PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc4a @@ -211,11 +211,11 @@ POSTHOOK: Input: default@small_alltypesorc_a -64 -7196 NULL 658026952 -64.0 -7196.0 NULL 4tAur 1969-12-31 15:59:53.866 1969-12-31 15:59:58.174 NULL true -64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +NULL -16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false +NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false +NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false +NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true +NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true PREHOOK: query: explain vectorization detail select * from small_alltypesorc_a c @@ -248,7 +248,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: c - Statistics: Num rows: 15 Data size: 3697 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 15 Data size: 3745 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] @@ -259,7 +259,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] - Statistics: Num rows: 15 Data size: 3697 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 15 Data size: 3745 Basic stats: COMPLETE Column stats: COMPLETE Map Join Operator condition map: Left Outer Join 0 to 1 @@ -267,25 +267,26 @@ STAGE PLANS: 0 _col2 (type: int) 1 _col2 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [2] - bigTableOuterKeyMapping: 2 -> 15 - 
bigTableRetainedColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 15] - bigTableValueColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] + bigTableKeyColumns: 2:int + bigTableRetainColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] + bigTableValueColumns: 0:tinyint, 1:smallint, 2:int, 3:bigint, 4:float, 5:double, 6:string, 7:string, 8:timestamp, 9:timestamp, 10:boolean, 11:boolean className: VectorMapJoinOuterLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24] - smallTableMapping: [13, 14, 16, 17, 18, 19, 20, 21, 22, 23, 24] + outerSmallTableKeyMapping: 2 -> 15 + projectedOutput: 0:tinyint, 1:smallint, 2:int, 3:bigint, 4:float, 5:double, 6:string, 7:string, 8:timestamp, 9:timestamp, 10:boolean, 11:boolean, 13:tinyint, 14:smallint, 15:int, 16:bigint, 17:float, 18:double, 19:string, 20:string, 21:timestamp, 22:timestamp, 23:boolean, 24:boolean + smallTableValueMapping: 13:tinyint, 14:smallint, 16:bigint, 17:float, 18:double, 19:string, 20:string, 21:timestamp, 22:timestamp, 23:boolean, 24:boolean + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23 input vertices: 1 Map 2 - Statistics: Num rows: 28 Data size: 15376 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 28 Data size: 15472 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 28 Data size: 15376 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 28 Data size: 15472 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -311,7 +312,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: cd - Statistics: Num rows: 15 Data size: 3697 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 15 Data size: 3745 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] @@ -322,18 +323,18 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] - Statistics: Num rows: 15 Data size: 3697 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 15 Data size: 3745 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col2 (type: int) sort order: + Map-reduce partition columns: _col2 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [2] + keyColumns: 2:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN 
[tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 3, 4, 5, 6, 7, 8, 9, 10, 11] - Statistics: Num rows: 15 Data size: 3697 Basic stats: COMPLETE Column stats: COMPLETE + valueColumns: 0:tinyint, 1:smallint, 3:bigint, 4:float, 5:double, 6:string, 7:string, 8:timestamp, 9:timestamp, 10:boolean, 11:boolean + Statistics: Num rows: 15 Data size: 3745 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: string), _col7 (type: string), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: boolean), _col11 (type: boolean) Execution mode: vectorized, llap LLAP IO: all inputs @@ -387,11 +388,11 @@ POSTHOOK: Input: default@small_alltypesorc_a -64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL -64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -64 -6907 253665376 NULL -64.0 -6907.0 1cGVWH7n1QU NULL NULL 1969-12-31 15:59:53.66 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +NULL -16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false NULL -16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false +NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false +NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false +NULL -16310 206154150 1864027286 
NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true +NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true PREHOOK: query: explain vectorization detail select c.ctinyint from small_alltypesorc_a c @@ -443,13 +444,14 @@ STAGE PLANS: 0 _col0 (type: tinyint) 1 _col0 (type: tinyint) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0] - bigTableValueColumnNums: [0] + bigTableKeyColumns: 0:tinyint + bigTableRetainColumnNums: [0] + bigTableValueColumns: 0:tinyint className: VectorMapJoinOuterLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0] + projectedOutput: 0:tinyint + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0 input vertices: 1 Map 2 @@ -503,10 +505,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: tinyint) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:tinyint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 15 Data size: 44 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -709,13 +710,14 @@ STAGE PLANS: 0 _col1 (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [2] - bigTableRetainedColumnNums: [0] - bigTableValueColumnNums: [0] + bigTableKeyColumns: 2:int + bigTableRetainColumnNums: [0] + bigTableValueColumns: 0:tinyint className: VectorMapJoinOuterLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0] + projectedOutput: 0:tinyint + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0 input vertices: 1 Map 3 @@ -727,13 +729,14 @@ STAGE PLANS: 0 _col0 (type: tinyint) 1 _col0 (type: tinyint) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0] - bigTableValueColumnNums: [0] + bigTableKeyColumns: 0:tinyint + bigTableRetainColumnNums: [0] + bigTableValueColumns: 0:tinyint className: VectorMapJoinOuterLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0] + 
projectedOutput: 0:tinyint + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0 input vertices: 1 Map 4 @@ -754,10 +757,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1] + valueColumns: 0:bigint, 1:bigint Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint), _col1 (type: bigint) Execution mode: vectorized, llap @@ -799,10 +801,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [2] + keyColumns: 2:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 15 Data size: 44 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -843,10 +844,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: tinyint) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:tinyint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 15 Data size: 44 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs diff --git ql/src/test/results/clientpositive/llap/vector_outer_join2.q.out ql/src/test/results/clientpositive/llap/vector_outer_join2.q.out index 2e90aaedb4..9f7f1743c8 100644 --- ql/src/test/results/clientpositive/llap/vector_outer_join2.q.out +++ ql/src/test/results/clientpositive/llap/vector_outer_join2.q.out @@ -94,11 +94,11 @@ POSTHOOK: query: select * from small_alltypesorc1a_n0 POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc1a_n0 #### A masked pattern was here #### -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +-51 NULL -1064981602 -1444011153 -51.0 NULL aY3tpnr6wfvmWMG0U881 2Ol4N3Ha0815Ej54lA2N 1969-12-31 16:00:08.451 NULL false false +-51 NULL -1065775394 -1331703092 -51.0 NULL aD88uS2N8DmqPlvjOa7F46i7 Ut8ka2o8iokF504065PYS 1969-12-31 16:00:08.451 NULL false true +-51 NULL -1066684273 2034191923 -51.0 NULL 2W4Kg220OcCy065HG60k6e D7GOQhc3qbAR6 1969-12-31 16:00:08.451 NULL false false +-51 NULL -1067683781 1750003656 -51.0 NULL IbgbUvP5 47x2I874 1969-12-31 
16:00:08.451 NULL false true +-51 NULL -1071480828 -1401575336 -51.0 NULL aw724t8c5558x2xneC624 4uE7l74tESBiKfu7c8wM7GA 1969-12-31 16:00:08.451 NULL true true PREHOOK: query: select * from small_alltypesorc2a_n0 PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc2a_n0 @@ -120,11 +120,11 @@ POSTHOOK: query: select * from small_alltypesorc3a_n0 POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc3a_n0 #### A masked pattern was here #### -NULL -13166 626923679 NULL NULL -13166.0 821UdmGbkEf4j NULL 1969-12-31 15:59:55.089 1969-12-31 16:00:15.69 true NULL -NULL -14426 626923679 NULL NULL -14426.0 821UdmGbkEf4j NULL 1969-12-31 16:00:11.505 1969-12-31 16:00:13.309 true NULL -NULL -14847 626923679 NULL NULL -14847.0 821UdmGbkEf4j NULL 1969-12-31 16:00:00.612 1969-12-31 15:59:43.704 true NULL -NULL -15632 528534767 NULL NULL -15632.0 cvLH6Eat2yFsyy7p NULL NULL 1969-12-31 15:59:53.593 true NULL -NULL -15830 253665376 NULL NULL -15830.0 1cGVWH7n1QU NULL 1969-12-31 16:00:02.582 1969-12-31 16:00:00.518 true NULL +-64 -10462 626923679 NULL -64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL +-64 -15920 528534767 NULL -64.0 -15920.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:51.859 1969-12-31 16:00:14.468 true NULL +-64 -6907 253665376 NULL -64.0 -6907.0 1cGVWH7n1QU NULL NULL 1969-12-31 15:59:53.66 true NULL +-64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL +-64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL PREHOOK: query: select * from small_alltypesorc4a_n0 PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc4a_n0 @@ -206,26 +206,26 @@ POSTHOOK: query: select * from small_alltypesorc_a_n0 POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc_a_n0 #### A masked pattern was here #### +-51 NULL -1064981602 -1444011153 -51.0 NULL aY3tpnr6wfvmWMG0U881 2Ol4N3Ha0815Ej54lA2N 1969-12-31 16:00:08.451 NULL false false +-51 NULL -1065775394 -1331703092 -51.0 NULL aD88uS2N8DmqPlvjOa7F46i7 Ut8ka2o8iokF504065PYS 1969-12-31 16:00:08.451 NULL false true +-51 NULL -1066684273 2034191923 -51.0 NULL 2W4Kg220OcCy065HG60k6e D7GOQhc3qbAR6 1969-12-31 16:00:08.451 NULL false false +-51 NULL -1067683781 1750003656 -51.0 NULL IbgbUvP5 47x2I874 1969-12-31 16:00:08.451 NULL false true +-51 NULL -1071480828 -1401575336 -51.0 NULL aw724t8c5558x2xneC624 4uE7l74tESBiKfu7c8wM7GA 1969-12-31 16:00:08.451 NULL true true -60 -200 NULL NULL -60.0 -200.0 NULL NULL 1969-12-31 16:00:11.996 1969-12-31 15:59:55.451 NULL NULL -61 -7196 NULL NULL -61.0 -7196.0 NULL 8Mlns2Tl6E0g 1969-12-31 15:59:44.823 1969-12-31 15:59:58.174 NULL false -61 -7196 NULL NULL -61.0 -7196.0 NULL fUJIN 1969-12-31 16:00:11.842 1969-12-31 15:59:58.174 NULL false -62 -7196 NULL NULL -62.0 -7196.0 NULL jf1Cw6qhkNToQuud 1969-12-31 16:00:12.388 1969-12-31 15:59:58.174 NULL false -62 -7196 NULL NULL -62.0 -7196.0 NULL yLiOchx5PfDTFdcMduBTg 1969-12-31 16:00:02.373 1969-12-31 15:59:58.174 NULL false +-64 -10462 626923679 NULL -64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL +-64 -15920 528534767 NULL -64.0 -15920.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:51.859 1969-12-31 16:00:14.468 true NULL +-64 -6907 253665376 NULL -64.0 -6907.0 1cGVWH7n1QU NULL NULL 1969-12-31 15:59:53.66 true NULL -64 -7196 NULL -1615920595 -64.0 -7196.0 NULL X5rDjl 1969-12-31 16:00:11.912 1969-12-31 15:59:58.174 NULL false -64 -7196 NULL -1639157869 
-64.0 -7196.0 NULL IJ0Oj7qAiqNGsN7gn 1969-12-31 16:00:01.785 1969-12-31 15:59:58.174 NULL false -64 -7196 NULL -527203677 -64.0 -7196.0 NULL JBE4H5RoK412Cs260I72 1969-12-31 15:59:50.184 1969-12-31 15:59:58.174 NULL true -64 -7196 NULL 406535485 -64.0 -7196.0 NULL E011i 1969-12-31 15:59:56.048 1969-12-31 15:59:58.174 NULL false -64 -7196 NULL 658026952 -64.0 -7196.0 NULL 4tAur 1969-12-31 15:59:53.866 1969-12-31 15:59:58.174 NULL true -NULL -13166 626923679 NULL NULL -13166.0 821UdmGbkEf4j NULL 1969-12-31 15:59:55.089 1969-12-31 16:00:15.69 true NULL -NULL -14426 626923679 NULL NULL -14426.0 821UdmGbkEf4j NULL 1969-12-31 16:00:11.505 1969-12-31 16:00:13.309 true NULL -NULL -14847 626923679 NULL NULL -14847.0 821UdmGbkEf4j NULL 1969-12-31 16:00:00.612 1969-12-31 15:59:43.704 true NULL -NULL -15632 528534767 NULL NULL -15632.0 cvLH6Eat2yFsyy7p NULL NULL 1969-12-31 15:59:53.593 true NULL -NULL -15830 253665376 NULL NULL -15830.0 1cGVWH7n1QU NULL 1969-12-31 16:00:02.582 1969-12-31 16:00:00.518 true NULL -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +-64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL +-64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL PREHOOK: query: explain vectorization detail select count(*), sum(t1.c_cbigint) from (select c.cbigint as c_cbigint from small_alltypesorc_a_n0 c @@ -284,13 +284,14 @@ STAGE PLANS: 0 _col0 (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [2] - bigTableRetainedColumnNums: [3] - bigTableValueColumnNums: [3] + bigTableKeyColumns: 2:int + bigTableRetainColumnNums: [3] + bigTableValueColumns: 3:bigint className: VectorMapJoinOuterLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [3] + projectedOutput: 3:bigint + hashTableImplementationType: OPTIMIZED outputColumnNames: _col1 input vertices: 1 Map 3 @@ -302,17 +303,18 @@ STAGE PLANS: 0 _col1 (type: bigint) 1 _col0 (type: bigint) Map Join Vectorization: - bigTableKeyColumnNums: [3] - bigTableRetainedColumnNums: [3] - bigTableValueColumnNums: [3] + bigTableKeyColumns: 3:bigint + bigTableRetainColumnNums: [3] + bigTableValueColumns: 3:bigint className: VectorMapJoinOuterLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [3] + projectedOutput: 
3:bigint + hashTableImplementationType: OPTIMIZED outputColumnNames: _col1 input vertices: 1 Map 4 - Statistics: Num rows: 142 Data size: 1064 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 100 Data size: 728 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: count(), sum(_col1) Group By Vectorization: @@ -329,10 +331,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1] + valueColumns: 0:bigint, 1:bigint Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint), _col1 (type: bigint) Execution mode: vectorized, llap @@ -374,10 +375,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [2] + keyColumns: 2:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 20 Data size: 44 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -418,10 +418,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [3] + keyColumns: 3:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 20 Data size: 88 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -505,4 +504,4 @@ left outer join small_alltypesorc_a_n0 hd POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc_a_n0 #### A masked pattern was here #### -34 -26289186744 +24 -3110813706 diff --git ql/src/test/results/clientpositive/llap/vector_outer_join3.q.out ql/src/test/results/clientpositive/llap/vector_outer_join3.q.out index af697479af..a975d8afc7 100644 --- ql/src/test/results/clientpositive/llap/vector_outer_join3.q.out +++ ql/src/test/results/clientpositive/llap/vector_outer_join3.q.out @@ -94,11 +94,11 @@ POSTHOOK: query: select * from small_alltypesorc1a_n1 POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc1a_n1 #### A masked pattern was here #### -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +-64 -10462 626923679 NULL 
-64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL +-64 -15920 528534767 NULL -64.0 -15920.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:51.859 1969-12-31 16:00:14.468 true NULL +-64 -6907 253665376 NULL -64.0 -6907.0 1cGVWH7n1QU NULL NULL 1969-12-31 15:59:53.66 true NULL +-64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL +-64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL PREHOOK: query: select * from small_alltypesorc2a_n1 PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc2a_n1 @@ -216,16 +216,16 @@ POSTHOOK: Input: default@small_alltypesorc_a_n1 -51 NULL NULL -1874052220 -51.0 NULL c61B47I604gymFJ sjWQS78 1969-12-31 16:00:08.451 NULL false false -51 NULL NULL -1927203921 -51.0 NULL 45ja5suO 42S0I0 1969-12-31 16:00:08.451 NULL true true -51 NULL NULL -1970551565 -51.0 NULL r2uhJH3 loXMWyrHjVeK 1969-12-31 16:00:08.451 NULL false false +-64 -10462 626923679 NULL -64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL +-64 -15920 528534767 NULL -64.0 -15920.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:51.859 1969-12-31 16:00:14.468 true NULL +-64 -6907 253665376 NULL -64.0 -6907.0 1cGVWH7n1QU NULL NULL 1969-12-31 15:59:53.66 true NULL -64 -7196 NULL -1615920595 -64.0 -7196.0 NULL X5rDjl 1969-12-31 16:00:11.912 1969-12-31 15:59:58.174 NULL false -64 -7196 NULL -1639157869 -64.0 -7196.0 NULL IJ0Oj7qAiqNGsN7gn 1969-12-31 16:00:01.785 1969-12-31 15:59:58.174 NULL false -64 -7196 NULL -527203677 -64.0 -7196.0 NULL JBE4H5RoK412Cs260I72 1969-12-31 15:59:50.184 1969-12-31 15:59:58.174 NULL true -64 -7196 NULL 406535485 -64.0 -7196.0 NULL E011i 1969-12-31 15:59:56.048 1969-12-31 15:59:58.174 NULL false -64 -7196 NULL 658026952 -64.0 -7196.0 NULL 4tAur 1969-12-31 15:59:53.866 1969-12-31 15:59:58.174 NULL true -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +-64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL +-64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL PREHOOK: query: explain vectorization detail formatted select count(*) from (select c.cstring1 from small_alltypesorc_a_n1 c @@ -265,7 +265,7 @@ left outer join small_alltypesorc_a_n1 hd POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc_a_n1 #### A masked pattern was here #### -20 +32 PREHOOK: query: explain vectorization detail formatted select count(*) from (select c.cstring1 from small_alltypesorc_a_n1 c @@ -305,7 +305,7 @@ left outer join small_alltypesorc_a_n1 hd POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc_a_n1 #### A masked pattern was here #### -28 +24 PREHOOK: query: explain vectorization detail formatted select count(*) from (select c.cstring1 from small_alltypesorc_a_n1 c @@ -345,4 +345,4 @@ left outer join 
small_alltypesorc_a_n1 hd POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc_a_n1 #### A masked pattern was here #### -28 +24 diff --git ql/src/test/results/clientpositive/llap/vector_outer_join4.q.out ql/src/test/results/clientpositive/llap/vector_outer_join4.q.out index 8b2d98951a..88e459382d 100644 --- ql/src/test/results/clientpositive/llap/vector_outer_join4.q.out +++ ql/src/test/results/clientpositive/llap/vector_outer_join4.q.out @@ -130,16 +130,16 @@ POSTHOOK: query: select * from small_alltypesorc3b POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc3b #### A masked pattern was here #### -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -609074876 -1887561756 NULL NULL EcM71 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:55.061 true false -NULL NULL -700300206 -1887561756 NULL NULL kdqQE010 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:58.384 false false -NULL NULL -726473298 1864027286 NULL NULL OFy1a1xf37f75b5N 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:11.799 true true -NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.55 true false -NULL NULL -838810013 1864027286 NULL NULL N016jPED08o 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:44.252 false true -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +NULL -16269 -378213344 -1645852809 NULL -16269.0 sOdj1Tmvbl03f xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:15.867 false false +NULL -16274 -671342269 -1645852809 NULL -16274.0 3DE7EQo4KyT0hS xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:51.469 false false +NULL -16296 -146635689 -1645852809 NULL -16296.0 r251rbt884txX2MNq4MM14 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:44.809 false false +NULL -16296 593429004 -1887561756 NULL -16296.0 dhDYJ076SFcC 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:47.422 false false +NULL -16300 -860437234 -1645852809 NULL -16300.0 Fb2W1r24opqN8m6571p xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:45.815 true false +NULL -16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false +NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false +NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false +NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true +NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true PREHOOK: query: select * from small_alltypesorc4b PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc4b @@ -236,16 +236,16 @@ POSTHOOK: Input: default@small_alltypesorc_b -64 -7196 NULL 658026952 -64.0 -7196.0 NULL 4tAur 1969-12-31 15:59:53.866 1969-12-31 15:59:58.174 NULL true -64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 
16:00:00.631 1969-12-31 16:00:01.781 true NULL -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -609074876 -1887561756 NULL NULL EcM71 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:55.061 true false -NULL NULL -700300206 -1887561756 NULL NULL kdqQE010 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:58.384 false false -NULL NULL -726473298 1864027286 NULL NULL OFy1a1xf37f75b5N 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:11.799 true true -NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.55 true false -NULL NULL -838810013 1864027286 NULL NULL N016jPED08o 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:44.252 false true -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +NULL -16269 -378213344 -1645852809 NULL -16269.0 sOdj1Tmvbl03f xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:15.867 false false +NULL -16274 -671342269 -1645852809 NULL -16274.0 3DE7EQo4KyT0hS xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:51.469 false false +NULL -16296 -146635689 -1645852809 NULL -16296.0 r251rbt884txX2MNq4MM14 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:44.809 false false +NULL -16296 593429004 -1887561756 NULL -16296.0 dhDYJ076SFcC 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:47.422 false false +NULL -16300 -860437234 -1645852809 NULL -16300.0 Fb2W1r24opqN8m6571p xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:45.815 true false +NULL -16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false +NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false +NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false +NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true +NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true PREHOOK: query: explain vectorization detail formatted select * from small_alltypesorc_b c @@ -317,16 +317,16 @@ POSTHOOK: Input: default@small_alltypesorc_b -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -64 -3097 253665376 NULL -64.0 -3097.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.013 1969-12-31 16:00:06.097 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -64 -6907 253665376 NULL -64.0 -6907.0 1cGVWH7n1QU NULL NULL 1969-12-31 15:59:53.66 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false 
false -NULL NULL -609074876 -1887561756 NULL NULL EcM71 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:55.061 true false NULL NULL -609074876 -1887561756 NULL NULL EcM71 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:55.061 true false -NULL NULL -700300206 -1887561756 NULL NULL kdqQE010 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:58.384 false false NULL NULL -700300206 -1887561756 NULL NULL kdqQE010 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:58.384 false false -NULL NULL -726473298 1864027286 NULL NULL OFy1a1xf37f75b5N 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:11.799 true true NULL NULL -726473298 1864027286 NULL NULL OFy1a1xf37f75b5N 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:11.799 true true -NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.55 true false NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.55 true false -NULL NULL -838810013 1864027286 NULL NULL N016jPED08o 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:44.252 false true NULL NULL -838810013 1864027286 NULL NULL N016jPED08o 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:44.252 false true -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +NULL -16269 -378213344 -1645852809 NULL -16269.0 sOdj1Tmvbl03f xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:15.867 false false NULL -16269 -378213344 -1645852809 NULL -16269.0 sOdj1Tmvbl03f xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:15.867 false false +NULL -16274 -671342269 -1645852809 NULL -16274.0 3DE7EQo4KyT0hS xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:51.469 false false NULL -16274 -671342269 -1645852809 NULL -16274.0 3DE7EQo4KyT0hS xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:51.469 false false +NULL -16296 -146635689 -1645852809 NULL -16296.0 r251rbt884txX2MNq4MM14 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:44.809 false false NULL -16296 -146635689 -1645852809 NULL -16296.0 r251rbt884txX2MNq4MM14 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:44.809 false false +NULL -16296 593429004 -1887561756 NULL -16296.0 dhDYJ076SFcC 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:47.422 false false NULL -16296 593429004 -1887561756 NULL -16296.0 dhDYJ076SFcC 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:47.422 false false +NULL -16300 -860437234 -1645852809 NULL -16300.0 Fb2W1r24opqN8m6571p xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:45.815 true false NULL -16300 -860437234 -1645852809 NULL -16300.0 Fb2W1r24opqN8m6571p xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:45.815 true false +NULL -16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false NULL -16306 384405526 
-1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false +NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false +NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false +NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true +NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true PREHOOK: query: explain vectorization detail formatted select c.ctinyint from small_alltypesorc_b c diff --git ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out index ba2d9dfa4a..0f40378606 100644 --- ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out +++ ql/src/test/results/clientpositive/llap/vector_outer_reference_windowed.q.out @@ -296,10 +296,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:decimal(25,2) Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(25,2)) Execution mode: vectorized, llap @@ -353,11 +352,11 @@ STAGE PLANS: Map-reduce partition columns: 0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [1] + keyColumns: 1:int keyExpressions: ConstantVectorExpression(val 0) -> 1:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:decimal(25,2) Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(25,2)) Reducer 3 @@ -517,10 +516,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(15,2)), _col1 (type: decimal(15,2)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1] + keyColumns: 0:decimal(15,2), 1:decimal(15,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumns: 2:decimal(25,2) Statistics: Num rows: 2 Data size: 672 Basic stats: COMPLETE Column stats: COMPLETE 
value expressions: _col2 (type: decimal(25,2)) Execution mode: vectorized, llap @@ -576,11 +575,11 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: decimal(15,2)) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 0] + keyColumns: 1:decimal(15,2), 0:decimal(15,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [1] - valueColumnNums: [2] + partitionColumns: 1:decimal(15,2) + valueColumns: 2:decimal(25,2) Statistics: Num rows: 2 Data size: 672 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: decimal(25,2)) Reducer 3 @@ -588,7 +587,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -616,7 +615,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col0 ASC NULLS FIRST + order by: _col0 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -747,10 +746,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(15,2)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:decimal(15,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:decimal(15,2) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(15,2)) Execution mode: vectorized, llap @@ -800,10 +799,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(15,2)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:decimal(15,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: no inputs @@ -845,6 +843,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(15,2)), _col1 (type: decimal(15,2)) Statistics: Num rows: 2 Data size: 672 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: decimal(25,2)) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: @@ -881,11 +882,11 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: decimal(15,2)) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 0] + keyColumns: 1:decimal(15,2), 0:decimal(15,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, 
No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [1] - valueColumnNums: [2] + partitionColumns: 1:decimal(15,2) + valueColumns: 2:decimal(25,2) Statistics: Num rows: 2 Data size: 672 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: decimal(25,2)) Reducer 4 @@ -893,7 +894,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -921,7 +922,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col0 ASC NULLS FIRST + order by: _col0 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -1056,10 +1057,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(15,2)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:decimal(15,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: no inputs @@ -1108,10 +1108,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(15,2)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:decimal(15,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:decimal(15,2) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(15,2)) Execution mode: vectorized, llap @@ -1154,6 +1154,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(15,2)), _col1 (type: decimal(15,2)) Statistics: Num rows: 2 Data size: 672 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: decimal(25,2)) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: @@ -1190,11 +1193,11 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: decimal(15,2)) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 0] + keyColumns: 1:decimal(15,2), 0:decimal(15,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [1] - valueColumnNums: [2] + partitionColumns: 1:decimal(15,2) + valueColumns: 2:decimal(25,2) Statistics: Num rows: 2 Data size: 672 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: decimal(25,2)) Reducer 4 @@ -1202,7 +1205,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: 
hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -1230,7 +1233,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col0 ASC NULLS FIRST + order by: _col0 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -1364,10 +1367,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(15,2)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:decimal(15,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:decimal(15,2) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(15,2)) Execution mode: vectorized, llap @@ -1417,10 +1420,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(15,2)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:decimal(15,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:decimal(15,2) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(15,2)) Execution mode: vectorized, llap @@ -1463,6 +1466,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(15,2)) Statistics: Num rows: 2 Data size: 704 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: struct) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: llap Reduce Vectorization: @@ -1486,7 +1492,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -1594,10 +1600,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:decimal(17,2) Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(17,2)) Execution mode: vectorized, llap @@ -1651,11 +1656,11 @@ STAGE PLANS: Map-reduce partition columns: 0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [1] + keyColumns: 1:int keyExpressions: ConstantVectorExpression(val 0) -> 1:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, 
BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:decimal(17,2) Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: decimal(17,2)) Reducer 3 @@ -1815,10 +1820,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(7,2)), _col1 (type: decimal(7,2)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1] + keyColumns: 0:decimal(7,2), 1:decimal(7,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumns: 2:decimal(17,2) Statistics: Num rows: 2 Data size: 672 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: decimal(17,2)) Execution mode: vectorized, llap @@ -1874,11 +1879,11 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: decimal(7,2)) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 0] + keyColumns: 1:decimal(7,2), 0:decimal(7,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [1] - valueColumnNums: [2] + partitionColumns: 1:decimal(7,2) + valueColumns: 2:decimal(17,2) Statistics: Num rows: 2 Data size: 672 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: decimal(17,2)) Reducer 3 @@ -1886,7 +1891,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -1914,7 +1919,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col0 ASC NULLS FIRST + order by: _col0 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -2045,10 +2050,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(7,2)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:decimal(7,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:decimal(7,2) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(7,2)) Execution mode: vectorized, llap @@ -2098,10 +2103,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(7,2)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:decimal(7,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 4 
Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: no inputs @@ -2143,6 +2147,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(7,2)), _col1 (type: decimal(7,2)) Statistics: Num rows: 2 Data size: 672 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: decimal(17,2)) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: @@ -2179,11 +2186,11 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: decimal(7,2)) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 0] + keyColumns: 1:decimal(7,2), 0:decimal(7,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [1] - valueColumnNums: [2] + partitionColumns: 1:decimal(7,2) + valueColumns: 2:decimal(17,2) Statistics: Num rows: 2 Data size: 672 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: decimal(17,2)) Reducer 4 @@ -2191,7 +2198,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -2219,7 +2226,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col0 ASC NULLS FIRST + order by: _col0 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -2354,10 +2361,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(7,2)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:decimal(7,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 4 Data size: 448 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: no inputs @@ -2406,10 +2412,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(7,2)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:decimal(7,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:decimal(7,2) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(7,2)) Execution mode: vectorized, llap @@ -2452,6 +2458,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(7,2)), _col1 (type: decimal(7,2)) Statistics: Num rows: 2 Data size: 672 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: decimal(17,2)) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false 
Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: @@ -2488,11 +2497,11 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: decimal(7,2)) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 0] + keyColumns: 1:decimal(7,2), 0:decimal(7,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [1] - valueColumnNums: [2] + partitionColumns: 1:decimal(7,2) + valueColumns: 2:decimal(17,2) Statistics: Num rows: 2 Data size: 672 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: decimal(17,2)) Reducer 4 @@ -2500,7 +2509,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -2528,7 +2537,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col0 ASC NULLS FIRST + order by: _col0 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -2662,10 +2671,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(7,2)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:decimal(7,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:decimal(7,2) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(7,2)) Execution mode: vectorized, llap @@ -2715,10 +2724,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(7,2)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:decimal(7,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:decimal(7,2) Statistics: Num rows: 4 Data size: 896 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: decimal(7,2)) Execution mode: vectorized, llap @@ -2761,6 +2770,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: decimal(7,2)) Statistics: Num rows: 2 Data size: 704 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: struct) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: llap Reduce Vectorization: @@ -2784,7 +2796,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/llap/vector_ptf_1.q.out ql/src/test/results/clientpositive/llap/vector_ptf_1.q.out index ef4934e4cb..8bec4aa715 100644 --- 
ql/src/test/results/clientpositive/llap/vector_ptf_1.q.out +++ ql/src/test/results/clientpositive/llap/vector_ptf_1.q.out @@ -98,11 +98,11 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [2, 3] + partitionColumns: 0:string + valueColumns: 2:double, 3:bigint Statistics: Num rows: 100 Data size: 18816 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: double), _col3 (type: bigint) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vector_ptf_part_simple.q.out ql/src/test/results/clientpositive/llap/vector_ptf_part_simple.q.out index 44bfe20720..9b1b34e039 100644 --- ql/src/test/results/clientpositive/llap/vector_ptf_part_simple.q.out +++ ql/src/test/results/clientpositive/llap/vector_ptf_part_simple.q.out @@ -142,10 +142,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2] + valueColumns: 1:string, 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_name (type: string), p_retailprice (type: double) Execution mode: vectorized, llap @@ -402,10 +402,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2] + valueColumns: 1:string, 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_name (type: string), p_retailprice (type: double) Execution mode: vectorized, llap @@ -631,10 +631,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2] + valueColumns: 1:string, 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_name (type: string), p_retailprice (type: double) Execution mode: vectorized, llap @@ -860,11 +860,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: 
[0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [2] + partitionColumns: 0:string + valueColumns: 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_retailprice (type: double) Execution mode: vectorized, llap @@ -889,7 +889,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -917,7 +917,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -1122,11 +1122,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [2] + partitionColumns: 0:string + valueColumns: 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_retailprice (type: double) Execution mode: vectorized, llap @@ -1167,7 +1167,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -1352,11 +1352,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [2] + partitionColumns: 0:string + valueColumns: 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_retailprice (type: double) Execution mode: vectorized, llap @@ -1397,7 +1397,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -1582,12 +1582,12 @@ STAGE PLANS: Map-reduce partition columns: 0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4, 1] + keyColumns: 4:int, 1:string keyExpressions: ConstantVectorExpression(val 0) -> 4:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS 
true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [5] - valueColumnNums: [0, 2] + partitionColumns: 5:int + valueColumns: 0:string, 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_mfgr (type: string), p_retailprice (type: double) Execution mode: vectorized, llap @@ -1612,7 +1612,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -1640,7 +1640,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: 0 raw input shape: window functions: @@ -1845,12 +1845,12 @@ STAGE PLANS: Map-reduce partition columns: 0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4, 1] + keyColumns: 4:int, 1:string keyExpressions: ConstantVectorExpression(val 0) -> 4:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [5] - valueColumnNums: [0, 2] + partitionColumns: 5:int + valueColumns: 0:string, 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_mfgr (type: string), p_retailprice (type: double) Execution mode: vectorized, llap @@ -1891,7 +1891,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: 0 raw input shape: window functions: @@ -2076,12 +2076,12 @@ STAGE PLANS: Map-reduce partition columns: 0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4, 1] + keyColumns: 4:int, 1:string keyExpressions: ConstantVectorExpression(val 0) -> 4:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [5] - valueColumnNums: [0, 2] + partitionColumns: 5:int + valueColumns: 0:string, 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_mfgr (type: string), p_retailprice (type: double) Execution mode: vectorized, llap @@ -2122,7 +2122,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: 0 raw input shape: window functions: @@ -2301,10 +2301,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2] + 
valueColumns: 1:string, 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_name (type: string), p_retailprice (type: double) Execution mode: vectorized, llap @@ -2529,10 +2529,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2] + valueColumns: 1:string, 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_name (type: string), p_retailprice (type: double) Execution mode: vectorized, llap @@ -2757,10 +2757,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2] + valueColumns: 1:string, 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_name (type: string), p_retailprice (type: double) Execution mode: vectorized, llap @@ -2985,11 +2985,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [2] + partitionColumns: 0:string + valueColumns: 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_retailprice (type: double) Execution mode: vectorized, llap @@ -3014,7 +3014,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -3042,7 +3042,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -3215,11 +3215,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [2] + partitionColumns: 0:string + valueColumns: 
2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_retailprice (type: double) Execution mode: vectorized, llap @@ -3244,7 +3244,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -3272,7 +3272,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -3445,11 +3445,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [2] + partitionColumns: 0:string + valueColumns: 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_retailprice (type: double) Execution mode: vectorized, llap @@ -3474,7 +3474,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -3502,7 +3502,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -3675,12 +3675,12 @@ STAGE PLANS: Map-reduce partition columns: 0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4, 1] + keyColumns: 4:int, 1:string keyExpressions: ConstantVectorExpression(val 0) -> 4:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [5] - valueColumnNums: [0, 2] + partitionColumns: 5:int + valueColumns: 0:string, 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_mfgr (type: string), p_retailprice (type: double) Execution mode: vectorized, llap @@ -3705,7 +3705,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -3733,7 +3733,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: 0 raw input shape: window functions: @@ -3906,12 +3906,12 @@ STAGE PLANS: Map-reduce partition columns: 0 (type: int) Reduce Sink Vectorization: className: 
VectorReduceSinkObjectHashOperator - keyColumnNums: [4, 1] + keyColumns: 4:int, 1:string keyExpressions: ConstantVectorExpression(val 0) -> 4:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [5] - valueColumnNums: [0, 2] + partitionColumns: 5:int + valueColumns: 0:string, 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_mfgr (type: string), p_retailprice (type: double) Execution mode: vectorized, llap @@ -3936,7 +3936,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -3964,7 +3964,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: 0 raw input shape: window functions: @@ -4137,12 +4137,12 @@ STAGE PLANS: Map-reduce partition columns: 0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4, 1] + keyColumns: 4:int, 1:string keyExpressions: ConstantVectorExpression(val 0) -> 4:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [5] - valueColumnNums: [0, 2] + partitionColumns: 5:int + valueColumns: 0:string, 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_mfgr (type: string), p_retailprice (type: double) Execution mode: vectorized, llap @@ -4167,7 +4167,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -4195,7 +4195,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: 0 raw input shape: window functions: @@ -4410,10 +4410,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2] + valueColumns: 1:string, 2:decimal(38,18) Statistics: Num rows: 40 Data size: 12944 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_name (type: string), p_retailprice (type: decimal(38,18)) Execution mode: vectorized, llap @@ -4638,11 +4638,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - 
keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [2] + partitionColumns: 0:string + valueColumns: 2:decimal(38,18) Statistics: Num rows: 40 Data size: 12944 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_retailprice (type: decimal(38,18)) Execution mode: vectorized, llap @@ -4667,7 +4667,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -4695,7 +4695,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -4888,10 +4888,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2] + valueColumns: 1:string, 2:bigint Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_name (type: string), p_bigint (type: bigint) Execution mode: vectorized, llap @@ -5116,11 +5116,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [2] + partitionColumns: 0:string + valueColumns: 2:bigint Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_bigint (type: bigint) Execution mode: vectorized, llap @@ -5145,7 +5145,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -5173,7 +5173,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -5340,10 +5340,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT 
columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumns: 2:double Statistics: Num rows: 40 Data size: 4216 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_retailprice (type: double) Execution mode: vectorized, llap @@ -5539,11 +5539,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [2] + partitionColumns: 0:string + valueColumns: 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_retailprice (type: double) Execution mode: vectorized, llap @@ -5568,7 +5568,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -5596,7 +5596,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -5740,11 +5740,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string), CASE WHEN ((p_mfgr = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END (type: timestamp) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 6] + keyColumns: 0:string, 6:timestamp keyExpressions: IfExprColumnNull(col 4:boolean, col 5:timestamp, null)(children: StringGroupColEqualStringScalar(col 0:string, val Manufacturer#2) -> 4:boolean, ConstantVectorExpression(val 2000-01-01 00:00:00) -> 5:timestamp) -> 6:timestamp native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2] + valueColumns: 1:string, 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_name (type: string), p_retailprice (type: double) Execution mode: vectorized, llap @@ -5940,12 +5940,12 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string), CASE WHEN ((p_mfgr = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END (type: timestamp) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 6, 1] + keyColumns: 0:string, 6:timestamp, 1:string keyExpressions: IfExprColumnNull(col 4:boolean, col 5:timestamp, null)(children: StringGroupColEqualStringScalar(col 0:string, val Manufacturer#2) -> 4:boolean, ConstantVectorExpression(val 2000-01-01 00:00:00) -> 5:timestamp) -> 6:timestamp native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for 
keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 9] - valueColumnNums: [2] + partitionColumns: 0:string, 9:timestamp + valueColumns: 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_retailprice (type: double) Execution mode: vectorized, llap @@ -5970,7 +5970,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aaa + reduceColumnNullOrder: aaz reduceColumnSortOrder: +++ allNative: false usesVectorUDFAdaptor: false @@ -5998,7 +5998,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0, CASE WHEN ((_col0 = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END raw input shape: window functions: @@ -6272,10 +6272,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumns: 2:double Statistics: Num rows: 40 Data size: 4216 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_retailprice (type: double) Execution mode: vectorized, llap @@ -6471,11 +6471,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [2] + partitionColumns: 0:string + valueColumns: 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_retailprice (type: double) Execution mode: vectorized, llap @@ -6500,7 +6500,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -6528,7 +6528,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -6672,12 +6672,12 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string), CASE WHEN ((p_mfgr = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END (type: timestamp) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 6, 1] + keyColumns: 0:string, 6:timestamp, 1:string keyExpressions: IfExprColumnNull(col 4:boolean, col 5:timestamp, null)(children: StringGroupColEqualStringScalar(col 0:string, val Manufacturer#2) -> 4:boolean, ConstantVectorExpression(val 
2000-01-01 00:00:00) -> 5:timestamp) -> 6:timestamp native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 9] - valueColumnNums: [2] + partitionColumns: 0:string, 9:timestamp + valueColumns: 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_retailprice (type: double) Execution mode: vectorized, llap @@ -6702,7 +6702,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aaa + reduceColumnNullOrder: aaz reduceColumnSortOrder: +++ allNative: false usesVectorUDFAdaptor: false @@ -6730,7 +6730,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0, CASE WHEN ((_col0 = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END raw input shape: window functions: @@ -6874,11 +6874,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string), CASE WHEN ((p_mfgr = 'Manufacturer#2')) THEN (TIMESTAMP'2000-01-01 00:00:00') ELSE (CAST( null AS TIMESTAMP)) END (type: timestamp) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 6] + keyColumns: 0:string, 6:timestamp keyExpressions: IfExprColumnNull(col 4:boolean, col 5:timestamp, null)(children: StringGroupColEqualStringScalar(col 0:string, val Manufacturer#2) -> 4:boolean, ConstantVectorExpression(val 2000-01-01 00:00:00) -> 5:timestamp) -> 6:timestamp native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2] + valueColumns: 1:string, 2:double Statistics: Num rows: 40 Data size: 9096 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_name (type: string), p_retailprice (type: double) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out index 038300e7b8..2f604bb202 100644 --- ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out +++ ql/src/test/results/clientpositive/llap/vector_reduce_groupby_duplicate_cols.q.out @@ -90,13 +90,15 @@ STAGE PLANS: 0 one (type: int), two (type: int) 1 1 (type: int), 2 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0, 1] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0, 1] + bigTableKeyColumns: 0:int, 1:int + bigTableRetainColumnNums: [0, 1] + bigTableValueColumns: 0:int, 1:int className: VectorMapJoinInnerBigOnlyMultiKeyOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1] + nonOuterSmallTableKeyMapping: [] + 
projectedOutput: 0:int, 1:int + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1 input vertices: 1 Map 1 diff --git ql/src/test/results/clientpositive/llap/vector_reuse_scratchcols.q.out ql/src/test/results/clientpositive/llap/vector_reuse_scratchcols.q.out index 4a10b3beb5..0b82230d59 100644 --- ql/src/test/results/clientpositive/llap/vector_reuse_scratchcols.q.out +++ ql/src/test/results/clientpositive/llap/vector_reuse_scratchcols.q.out @@ -129,10 +129,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + valueColumns: 0:bigint, 1:bigint, 2:double, 3:double, 4:double, 5:double, 6:double, 7:bigint, 8:double, 9:bigint, 10:tinyint Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: double), _col3 (type: double), _col4 (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: bigint), _col8 (type: double), _col9 (type: bigint), _col10 (type: tinyint) Execution mode: vectorized, llap @@ -337,10 +336,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + valueColumns: 0:bigint, 1:bigint, 2:double, 3:double, 4:double, 5:double, 6:double, 7:bigint, 8:double, 9:bigint, 10:tinyint Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: double), _col3 (type: double), _col4 (type: double), _col5 (type: double), _col6 (type: double), _col7 (type: bigint), _col8 (type: double), _col9 (type: bigint), _col10 (type: tinyint) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vector_string_concat.q.out ql/src/test/results/clientpositive/llap/vector_string_concat.q.out index a4f32f16fa..39578d2616 100644 --- ql/src/test/results/clientpositive/llap/vector_string_concat.q.out +++ ql/src/test/results/clientpositive/llap/vector_string_concat.q.out @@ -480,7 +480,6 @@ POSTHOOK: query: SELECT CONCAT(CONCAT(CONCAT('Quarter ',CAST(CAST((MONTH(dt) - 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@vectortab2korc_n0 #### A masked pattern was here #### -NULL Quarter 1-1970 Quarter 1-1971 Quarter 1-1972 @@ -530,3 +529,4 @@ Quarter 1-2015 Quarter 1-2016 Quarter 1-2017 Quarter 1-2018 +Quarter 1-2019 diff --git ql/src/test/results/clientpositive/llap/vector_topnkey.q.out ql/src/test/results/clientpositive/llap/vector_topnkey.q.out index 16803c9544..3d0793bffe 100644 --- ql/src/test/results/clientpositive/llap/vector_topnkey.q.out +++ ql/src/test/results/clientpositive/llap/vector_topnkey.q.out @@ -67,10 +67,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 
0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:bigint Statistics: Num rows: 250 Data size: 23750 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 value expressions: _col1 (type: bigint) @@ -96,7 +96,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -126,10 +126,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:bigint Statistics: Num rows: 250 Data size: 23750 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 value expressions: _col1 (type: bigint) @@ -138,7 +138,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -259,10 +259,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap @@ -287,7 +286,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -315,10 +314,9 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 250 Data size: 21750 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Reducer 3 @@ -326,7 +324,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false 
usesVectorUDFAdaptor: false @@ -434,10 +432,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: no inputs @@ -486,10 +483,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:string Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: string) Execution mode: vectorized, llap @@ -530,12 +527,15 @@ STAGE PLANS: Statistics: Num rows: 791 Data size: 140798 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 value expressions: _col1 (type: string) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false diff --git ql/src/test/results/clientpositive/llap/vector_udf1.q.out ql/src/test/results/clientpositive/llap/vector_udf1.q.out index 5db37eb1e1..26d695b4b7 100644 --- ql/src/test/results/clientpositive/llap/vector_udf1.q.out +++ ql/src/test/results/clientpositive/llap/vector_udf1.q.out @@ -2800,10 +2800,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1] + valueColumns: 0:string, 1:varchar(20) Statistics: Num rows: 1 Data size: 288 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: string), _col1 (type: varchar(20)) Execution mode: vectorized, llap @@ -2943,10 +2942,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1] + valueColumns: 0:string, 1:varchar(20) Statistics: Num rows: 1 Data size: 288 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: string), _col1 (type: varchar(20)) Execution mode: vectorized, llap 
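Note on the plan notation recurring above: in the "Reduce Vectorization" blocks, reduceColumnNullOrder carries one character per reduce-sink key, with 'a' meaning NULLs sort first and 'z' meaning NULLs sort last, so a value of "az" pairs a partition key that keeps NULLs first with an ORDER BY key planned as ASC NULLS LAST. Below is a minimal HiveQL sketch of the kind of windowing query that produces plans like these; the table name part_tiny and its DDL are illustrative assumptions, not part of this patch.

-- Hypothetical table mirroring the p_mfgr/p_name/p_retailprice columns seen in the plans above.
CREATE TABLE part_tiny (p_mfgr string, p_name string, p_retailprice double) STORED AS ORC;

-- With hive.default.nulls.last=true, the bare ASC below is planned as ASC NULLS LAST:
-- rows with NULL p_name rank after all non-NULL rows inside each p_mfgr partition,
-- which is what the reduceColumnNullOrder "az" entries record ('a' for the
-- partition key, 'z' for the order-by key).
SELECT p_mfgr, p_name, p_retailprice,
       rank() OVER (PARTITION BY p_mfgr ORDER BY p_name) AS r
FROM part_tiny;

-- The previous placement remains available per query via an explicit override,
-- which plans back to ASC NULLS FIRST ('a') for that key.
SELECT p_mfgr, p_name,
       rank() OVER (PARTITION BY p_mfgr ORDER BY p_name ASC NULLS FIRST) AS r
FROM part_tiny;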
diff --git ql/src/test/results/clientpositive/llap/vector_windowing.q.out ql/src/test/results/clientpositive/llap/vector_windowing.q.out index 53327bd7be..ef1e6538d8 100644 --- ql/src/test/results/clientpositive/llap/vector_windowing.q.out +++ ql/src/test/results/clientpositive/llap/vector_windowing.q.out @@ -43,11 +43,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 2:string + valueColumns: 5:int, 7:double Statistics: Num rows: 26 Data size: 6006 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int), p_retailprice (type: double) Execution mode: vectorized, llap @@ -72,7 +72,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -100,7 +100,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -279,11 +279,11 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 1, 2] - valueColumnNums: [3] + partitionColumns: 0:string, 1:string, 2:int + valueColumns: 3:double Statistics: Num rows: 13 Data size: 3003 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col3 (type: double) Execution mode: vectorized, llap @@ -339,11 +339,11 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 0] + keyColumns: 1:string, 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [1] - valueColumnNums: [2, 3] + partitionColumns: 1:string + valueColumns: 2:int, 3:double Statistics: Num rows: 13 Data size: 3003 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: int), _col3 (type: double) Reducer 3 @@ -367,7 +367,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col0 ASC NULLS FIRST + order by: _col0 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -529,11 +529,11 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 
(type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 1, 2] - valueColumnNums: [3] + partitionColumns: 0:string, 1:string, 2:int + valueColumns: 3:double Statistics: Num rows: 13 Data size: 3003 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col3 (type: double) Execution mode: vectorized, llap @@ -589,11 +589,11 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 0] + keyColumns: 1:string, 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [1] - valueColumnNums: [2, 3] + partitionColumns: 1:string + valueColumns: 2:int, 3:double Statistics: Num rows: 13 Data size: 3003 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: int), _col3 (type: double) Reducer 3 @@ -617,7 +617,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col0 ASC NULLS FIRST + order by: _col0 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -748,11 +748,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -777,7 +777,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -805,7 +805,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -942,11 +942,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 2:string + valueColumns: 
5:int, 7:double Statistics: Num rows: 26 Data size: 6006 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int), p_retailprice (type: double) Execution mode: vectorized, llap @@ -987,7 +987,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1143,11 +1143,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 2:string + valueColumns: 5:int, 7:double Statistics: Num rows: 26 Data size: 6006 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int), p_retailprice (type: double) Execution mode: vectorized, llap @@ -1188,7 +1188,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1350,11 +1350,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [0, 5, 7] + partitionColumns: 2:string + valueColumns: 0:int, 5:int, 7:double Statistics: Num rows: 26 Data size: 6110 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_partkey (type: int), p_size (type: int), p_retailprice (type: double) Execution mode: vectorized, llap @@ -1396,11 +1396,10 @@ STAGE PLANS: Map-reduce partition columns: p_partkey (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [] + partitionColumns: 0:int Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: no inputs @@ -1440,7 +1439,7 @@ STAGE PLANS: Partition table definition input alias: abc name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col0: int, _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1471,6 +1470,9 @@ STAGE PLANS: Map-reduce partition columns: _col2 (type: string) Statistics: Num rows: 27 Data size: 6237 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col5 (type: int), _col7 (type: double) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin 
Supported IS false Reducer 4 Execution mode: llap Reduce Vectorization: @@ -1492,7 +1494,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1636,11 +1638,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1, 5] + keyColumns: 2:string, 1:string, 5:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [] + partitionColumns: 2:string Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: no inputs @@ -1664,7 +1665,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aaz + reduceColumnNullOrder: azz reduceColumnSortOrder: ++- allNative: false usesVectorUDFAdaptor: false @@ -1692,7 +1693,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST, _col5 DESC NULLS LAST + order by: _col1 ASC NULLS LAST, _col5 DESC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1826,11 +1827,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 2:string + valueColumns: 5:int, 7:double Statistics: Num rows: 26 Data size: 6006 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int), p_retailprice (type: double) Execution mode: vectorized, llap @@ -1855,7 +1856,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -1883,7 +1884,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -2035,11 +2036,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 2:string + valueColumns: 5:int, 7:double 
Statistics: Num rows: 26 Data size: 6006 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int), p_retailprice (type: double) Execution mode: vectorized, llap @@ -2064,7 +2065,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -2092,7 +2093,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -2246,11 +2247,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -2291,7 +2292,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -2436,12 +2437,12 @@ STAGE PLANS: Map-reduce partition columns: 'Manufacturer#3' (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [10, 1] + keyColumns: 10:string, 1:string keyExpressions: ConstantVectorExpression(val Manufacturer#3) -> 10:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [11] - valueColumnNums: [5] + partitionColumns: 11:string + valueColumns: 5:int Statistics: Num rows: 5 Data size: 1115 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -2482,7 +2483,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: 'Manufacturer#3' raw input shape: window functions: @@ -2603,11 +2604,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -2648,7 +2649,7 @@ 
STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -2769,11 +2770,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -2798,7 +2799,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -2826,7 +2827,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -2988,11 +2989,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -3033,7 +3034,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -3134,7 +3135,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col12 ASC NULLS FIRST, _col11 ASC NULLS FIRST + order by: _col12 ASC NULLS LAST, _col11 ASC NULLS LAST partition by: _col12 raw input shape: window functions: @@ -3279,11 +3280,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -3324,7 +3325,7 @@ STAGE PLANS: Windowing table 
definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -3387,7 +3388,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col9 ASC NULLS FIRST + order by: _col9 ASC NULLS LAST partition by: _col6 raw input shape: window functions: @@ -3429,7 +3430,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col7 ASC NULLS FIRST, _col6 ASC NULLS FIRST + order by: _col7 ASC NULLS LAST, _col6 ASC NULLS LAST partition by: _col7 raw input shape: window functions: @@ -3557,11 +3558,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -3586,7 +3587,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: true usesVectorUDFAdaptor: false @@ -3614,7 +3615,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -3658,11 +3659,11 @@ STAGE PLANS: Map-reduce partition columns: _col2 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [3, 4, 2] + partitionColumns: 0:string + valueColumns: 3:bigint, 4:bigint, 2:int Statistics: Num rows: 26 Data size: 12766 Basic stats: COMPLETE Column stats: COMPLETE value expressions: count_window_0 (type: bigint), count_window_1 (type: bigint), _col5 (type: int) Reducer 3 @@ -3686,7 +3687,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col4 ASC NULLS FIRST, _col3 ASC NULLS FIRST + order by: _col4 ASC NULLS LAST, _col3 ASC NULLS LAST partition by: _col4 raw input shape: window functions: @@ -3809,11 +3810,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - 
partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 2:string + valueColumns: 5:int, 7:double Statistics: Num rows: 26 Data size: 6006 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int), p_retailprice (type: double) Execution mode: vectorized, llap @@ -3854,7 +3855,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -4022,11 +4023,11 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int), _col3 (type: double) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2, 3] + keyColumns: 0:string, 1:string, 2:int, 3:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 1, 2, 3] - valueColumnNums: [4, 5] + partitionColumns: 0:string, 1:string, 2:int, 3:double + valueColumns: 4:double, 5:double Statistics: Num rows: 13 Data size: 3211 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col4 (type: double), _col5 (type: double) Execution mode: vectorized, llap @@ -4082,11 +4083,11 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 0] + keyColumns: 1:string, 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [1] - valueColumnNums: [2, 3, 4, 5] + partitionColumns: 1:string + valueColumns: 2:int, 3:double, 4:double, 5:double Statistics: Num rows: 13 Data size: 3211 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: int), _col3 (type: double), _col4 (type: double), _col5 (type: double) Reducer 3 @@ -4110,7 +4111,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST, _col0 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST, _col0 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -4246,11 +4247,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 2:string + valueColumns: 5:int, 7:double Statistics: Num rows: 26 Data size: 6006 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int), p_retailprice (type: double) Execution mode: vectorized, llap @@ -4291,7 +4292,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC 
NULLS LAST, _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -4442,11 +4443,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [0, 5, 7] + partitionColumns: 2:string + valueColumns: 0:int, 5:int, 7:double Statistics: Num rows: 26 Data size: 6110 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_partkey (type: int), p_size (type: int), p_retailprice (type: double) Execution mode: vectorized, llap @@ -4487,7 +4488,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -4709,11 +4710,11 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [2] + partitionColumns: 0:string + valueColumns: 2:double Statistics: Num rows: 13 Data size: 2574 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: double) Execution mode: vectorized, llap @@ -4756,7 +4757,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col0 ASC NULLS FIRST + order by: _col0 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -4781,7 +4782,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: zz reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -4990,11 +4991,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [3, 7] + partitionColumns: 2:string + valueColumns: 3:string, 7:double Statistics: Num rows: 26 Data size: 8294 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_brand (type: string), p_retailprice (type: double) Execution mode: vectorized, llap @@ -5035,7 +5036,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -5201,7 +5202,7 @@ STAGE PLANS: Windowing 
table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST, _col4 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col4 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -5478,11 +5479,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 2:string + valueColumns: 5:int, 7:double Statistics: Num rows: 26 Data size: 6006 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int), p_retailprice (type: double) Reduce Output Operator @@ -5491,11 +5492,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 6006 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Reduce Output Operator @@ -5504,11 +5505,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 6006 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -5569,7 +5570,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -5660,7 +5661,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -5717,7 +5718,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col8 ASC NULLS FIRST + order by: _col8 ASC NULLS LAST partition by: _col5 raw input shape: window functions: @@ -5759,7 +5760,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col6 ASC NULLS FIRST, _col5 ASC NULLS FIRST + order by: _col6 ASC NULLS LAST, _col5 ASC NULLS LAST partition by: _col6 raw input shape: window functions: @@ -5820,7 +5821,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: 
hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: true usesVectorUDFAdaptor: false @@ -5848,7 +5849,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -5892,11 +5893,11 @@ STAGE PLANS: Map-reduce partition columns: _col2 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [3, 4, 2] + partitionColumns: 0:string + valueColumns: 3:bigint, 4:bigint, 2:int Statistics: Num rows: 26 Data size: 12766 Basic stats: COMPLETE Column stats: COMPLETE value expressions: count_window_0 (type: bigint), count_window_1 (type: bigint), _col5 (type: int) Reducer 9 @@ -5920,7 +5921,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col4 ASC NULLS FIRST, _col3 ASC NULLS FIRST + order by: _col4 ASC NULLS LAST, _col3 ASC NULLS LAST partition by: _col4 raw input shape: window functions: @@ -6262,11 +6263,11 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:string, 2:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 1, 2] - valueColumnNums: [3] + partitionColumns: 0:string, 1:string, 2:int + valueColumns: 3:double Statistics: Num rows: 13 Data size: 3003 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col3 (type: double) Execution mode: vectorized, llap @@ -6322,11 +6323,11 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 0] + keyColumns: 1:string, 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [1] - valueColumnNums: [2, 3] + partitionColumns: 1:string + valueColumns: 2:int, 3:double Statistics: Num rows: 13 Data size: 3003 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: int), _col3 (type: double) Reducer 3 @@ -6350,7 +6351,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col0 ASC NULLS FIRST + order by: _col0 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -6485,11 +6486,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 5] + keyColumns: 
2:string, 5:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [1] + partitionColumns: 2:string + valueColumns: 1:string Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_name (type: string) Execution mode: vectorized, llap @@ -6530,7 +6531,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -6649,11 +6650,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -6694,7 +6695,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -6805,11 +6806,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -6850,7 +6851,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -6967,11 +6968,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -7012,7 +7013,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 
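
Note: every "NULLS FIRST -> NULLS LAST" flip in these windowing plans follows from the new hive.default.nulls.last default introduced earlier in this patch; an ASC sort key with no explicit null ordering now resolves to NULLS LAST, while DESC keys already defaulted to NULLS LAST (the "_col4 DESC NULLS LAST" hunks below are unchanged). A minimal sketch against the part table these tests use; the SET statement only makes the dependency explicit and is not itself part of the patch:

    SET hive.default.nulls.last=true;
    -- A bare ASC key now resolves to ASC NULLS LAST in the plan:
    SELECT p_mfgr, p_name, p_size FROM part ORDER BY p_name;
    -- An explicit null ordering is still honored as written:
    SELECT p_mfgr, p_name, p_size FROM part ORDER BY p_name ASC NULLS FIRST;
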
name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -7139,11 +7140,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -7184,7 +7185,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -7305,11 +7306,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -7350,7 +7351,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -7481,11 +7482,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -7526,7 +7527,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -7661,11 +7662,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values 
IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -7706,7 +7707,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -7840,11 +7841,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -7885,7 +7886,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -8037,11 +8038,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -8066,7 +8067,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -8094,7 +8095,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -8216,6 +8217,7 @@ STAGE PLANS: #### A masked pattern was here #### Edges: Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: Map 1 @@ -8232,11 +8234,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2] + keyColumns: 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [7] + partitionColumns: 2:string + valueColumns: 7:double 
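
Note: the keyColumnNums/partitionColumnNums/valueColumnNums -> keyColumns/partitionColumns/valueColumns renames are a formatting change in EXPLAIN VECTORIZATION output only: each vectorized column reference now carries its type, e.g. "2:string, 1:string" instead of the bare indices "[2, 1]", and empty "valueColumnNums: []" lines are dropped rather than renamed. A hedged way to reproduce one such plan (the window function here is illustrative; the golden file's exact queries appear in its PREHOOK lines):

    EXPLAIN VECTORIZATION DETAIL
    SELECT p_mfgr, p_name, p_size,
           rank() OVER (PARTITION BY p_mfgr ORDER BY p_name) AS r
    FROM part;
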
Statistics: Num rows: 26 Data size: 2756 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_retailprice (type: double) Execution mode: vectorized, llap @@ -8261,16 +8263,16 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + - allNative: false + allNative: true usesVectorUDFAdaptor: false vectorized: true rowBatchContext: dataColumnCount: 2 dataColumns: KEY.reducesinkkey0:string, VALUE._col6:double partitionColumnCount: 0 - scratchColumnTypeNames: [double, double, double, double, bigint, double, double] + scratchColumnTypeNames: [double, double] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string), VALUE._col6 (type: double) @@ -8289,7 +8291,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -8299,51 +8301,122 @@ STAGE PLANS: name: sum window function: GenericUDAFSumDouble window frame: RANGE PRECEDING(MAX)~CURRENT - window function definition - alias: min_window_1 - arguments: _col7 - name: min - window function: GenericUDAFMinEvaluator - window frame: ROWS PRECEDING(MAX)~FOLLOWING(MAX) window function definition alias: max_window_2 arguments: _col7 name: max window function: GenericUDAFMaxEvaluator window frame: RANGE PRECEDING(MAX)~CURRENT + PTF Vectorization: + className: VectorPTFOperator + evaluatorClasses: [VectorPTFEvaluatorDoubleSum, VectorPTFEvaluatorDoubleMax] + functionInputExpressions: [col 1:double, col 1:double] + functionNames: [sum, max] + keyInputColumns: [0] + native: true + nonKeyInputColumns: [1] + orderExpressions: [col 0:string] + outputColumns: [2, 3, 0, 1] + outputTypes: [double, double, string, double] + streamingColumns: [] + Statistics: Num rows: 26 Data size: 9724 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: sum_window_0 (type: double), max_window_2 (type: double), _col2 (type: string), _col7 (type: double) + outputColumnNames: sum_window_0, max_window_2, _col2, _col7 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [2, 3, 0, 1] + Statistics: Num rows: 26 Data size: 9724 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + key expressions: _col2 (type: string) + sort order: + + Map-reduce partition columns: _col2 (type: string) + Reduce Sink Vectorization: + className: VectorReduceSinkObjectHashOperator + keyColumns: 0:string + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true + partitionColumns: 0:string + valueColumns: 2:double, 3:double, 1:double + Statistics: Num rows: 26 Data size: 9724 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: sum_window_0 (type: double), max_window_2 (type: double), _col7 (type: double) + Reducer 3 + Execution mode: vectorized, llap + Reduce Vectorization: + enabled: true + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true + reduceColumnNullOrder: a + reduceColumnSortOrder: + + allNative: false + usesVectorUDFAdaptor: 
false + vectorized: true + rowBatchContext: + dataColumnCount: 4 + dataColumns: KEY.reducesinkkey0:string, VALUE._col0:double, VALUE._col1:double, VALUE._col8:double + partitionColumnCount: 0 + scratchColumnTypeNames: [double, double, bigint, double, double] + Reduce Operator Tree: + Select Operator + expressions: VALUE._col0 (type: double), VALUE._col1 (type: double), KEY.reducesinkkey0 (type: string), VALUE._col8 (type: double) + outputColumnNames: _col0, _col1, _col4, _col9 + Select Vectorization: + className: VectorSelectOperator + native: true + projectedOutputColumnNums: [1, 2, 0, 3] + Statistics: Num rows: 26 Data size: 10140 Basic stats: COMPLETE Column stats: COMPLETE + PTF Operator + Function definitions: + Input definition + input alias: ptf_0 + output shape: _col0: double, _col1: double, _col4: string, _col9: double + type: WINDOWING + Windowing table definition + input alias: ptf_1 + name: windowingtablefunction + order by: _col4 ASC NULLS FIRST + partition by: _col4 + raw input shape: + window functions: + window function definition + alias: min_window_1 + arguments: _col9 + name: min + window function: GenericUDAFMinEvaluator + window frame: ROWS PRECEDING(MAX)~FOLLOWING(MAX) window function definition alias: avg_window_3 - arguments: _col7 + arguments: _col9 name: avg window function: GenericUDAFAverageEvaluatorDouble window frame: ROWS PRECEDING(MAX)~FOLLOWING(MAX) window function definition alias: count_window_4 - arguments: _col7 + arguments: _col9 name: count window function: GenericUDAFCountEvaluator window frame: ROWS PRECEDING(MAX)~FOLLOWING(MAX) PTF Vectorization: className: VectorPTFOperator - evaluatorClasses: [VectorPTFEvaluatorDoubleSum, VectorPTFEvaluatorDoubleMin, VectorPTFEvaluatorDoubleMax, VectorPTFEvaluatorDoubleAvg, VectorPTFEvaluatorCount] - functionInputExpressions: [col 1:double, col 1:double, col 1:double, col 1:double, col 1:double] - functionNames: [sum, min, max, avg, count] + evaluatorClasses: [VectorPTFEvaluatorDoubleMin, VectorPTFEvaluatorDoubleAvg, VectorPTFEvaluatorCount] + functionInputExpressions: [col 3:double, col 3:double, col 3:double] + functionNames: [min, avg, count] keyInputColumns: [0] native: true - nonKeyInputColumns: [1] + nonKeyInputColumns: [1, 2, 3] orderExpressions: [col 0:string] - outputColumns: [2, 3, 4, 5, 6, 0, 1] - outputTypes: [double, double, double, double, bigint, string, double] + outputColumns: [4, 5, 6, 1, 2, 0, 3] + outputTypes: [double, double, bigint, double, double, string, double] streamingColumns: [] - Statistics: Num rows: 26 Data size: 9724 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 26 Data size: 10140 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - expressions: _col2 (type: string), round(sum_window_0, 2) (type: double), min_window_1 (type: double), max_window_2 (type: double), round(avg_window_3, 2) (type: double), count_window_4 (type: bigint) + expressions: _col4 (type: string), round(_col0, 2) (type: double), min_window_1 (type: double), _col1 (type: double), round(avg_window_3, 2) (type: double), count_window_4 (type: bigint) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Select Vectorization: className: VectorSelectOperator native: true - projectedOutputColumnNums: [0, 7, 3, 4, 8, 6] - selectExpressions: RoundWithNumDigitsDoubleToDouble(col 2, decimalPlaces 2) -> 7:double, RoundWithNumDigitsDoubleToDouble(col 5, decimalPlaces 2) -> 8:double + projectedOutputColumnNums: [0, 7, 4, 2, 8, 6] + selectExpressions: RoundWithNumDigitsDoubleToDouble(col 
1, decimalPlaces 2) -> 7:double, RoundWithNumDigitsDoubleToDouble(col 5, decimalPlaces 2) -> 8:double Statistics: Num rows: 26 Data size: 3588 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false @@ -8455,11 +8528,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string), p_name (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2, 1] - valueColumnNums: [5, 7] + partitionColumns: 2:string, 1:string + valueColumns: 5:int, 7:double Statistics: Num rows: 26 Data size: 6006 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int), p_retailprice (type: double) Execution mode: vectorized, llap @@ -8484,7 +8557,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: zz reduceColumnSortOrder: ++ allNative: true usesVectorUDFAdaptor: false @@ -8512,7 +8585,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST partition by: _col2, _col1 raw input shape: window functions: @@ -8555,11 +8628,11 @@ STAGE PLANS: Map-reduce partition columns: _col2 (type: string), _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0, 1] - valueColumnNums: [4, 5, 2, 3] + partitionColumns: 0:string, 1:string + valueColumns: 4:double, 5:double, 2:int, 3:double Statistics: Num rows: 26 Data size: 12974 Basic stats: COMPLETE Column stats: COMPLETE value expressions: sum_window_0 (type: double), min_window_1 (type: double), _col5 (type: int), _col7 (type: double) Reducer 3 @@ -8567,7 +8640,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -8595,7 +8668,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col3 ASC NULLS FIRST + order by: _col3 ASC NULLS LAST partition by: _col4, _col3 raw input shape: window functions: @@ -8729,12 +8802,12 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 10] + keyColumns: 2:string, 10:string keyExpressions: StringSubstrColStart(col 4:string, start 1) -> 10:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT 
columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [4] + partitionColumns: 2:string + valueColumns: 4:string Statistics: Num rows: 26 Data size: 5252 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_type (type: string) Execution mode: vectorized, llap @@ -8759,7 +8832,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -8787,7 +8860,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: substr(_col4, 2) ASC NULLS FIRST + order by: substr(_col4, 2) ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -8918,11 +8991,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 2:string + valueColumns: 5:int, 7:double Statistics: Num rows: 26 Data size: 6006 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int), p_retailprice (type: double) Execution mode: vectorized, llap @@ -8947,7 +9020,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -8975,7 +9048,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -9105,11 +9178,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 5] + keyColumns: 2:string, 5:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [1, 7] + partitionColumns: 2:string + valueColumns: 1:string, 7:double Statistics: Num rows: 26 Data size: 6006 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_name (type: string), p_retailprice (type: double) Execution mode: vectorized, llap @@ -9134,7 +9207,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -9162,7 +9235,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST 
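
Note: reduceColumnNullOrder carries one character per reduce-sink key column, and these hunks are consistent with 'a' meaning NULLs-first and 'z' meaning NULLs-last at that key position (an inference from the surrounding diffs, not from this patch's prose). With key (p_mfgr, p_name) for "partition by p_mfgr order by p_name", the partition-only column keeps 'a' while the ORDER BY column flips to 'z', hence "aa" -> "az"; where both key columns come from the ORDER BY (e.g. "order by p_type, p_mfgr") it becomes "zz". For example:

    -- Reduce-sink key columns: (p_mfgr, p_name);
    -- expected reduceColumnNullOrder under the new default: "az"
    SELECT p_mfgr, p_name,
           first_value(p_size) OVER (PARTITION BY p_mfgr ORDER BY p_name) AS fv
    FROM part;
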
partition by: _col2 raw input shape: window functions: @@ -9292,11 +9365,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 2:string + valueColumns: 5:int, 7:double Statistics: Num rows: 26 Data size: 6006 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int), p_retailprice (type: double) Execution mode: vectorized, llap @@ -9337,7 +9410,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -9446,11 +9519,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 5] + keyColumns: 2:string, 5:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [1, 7] + partitionColumns: 2:string + valueColumns: 1:string, 7:double Statistics: Num rows: 26 Data size: 6006 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_name (type: string), p_retailprice (type: double) Execution mode: vectorized, llap @@ -9491,7 +9564,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -9603,12 +9676,12 @@ STAGE PLANS: Map-reduce partition columns: 0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [10] + keyColumns: 10:int keyExpressions: ConstantVectorExpression(val 0) -> 10:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [11] - valueColumnNums: [1, 7] + partitionColumns: 11:int + valueColumns: 1:string, 7:double Statistics: Num rows: 26 Data size: 3354 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_name (type: string), p_retailprice (type: double) Execution mode: vectorized, llap @@ -9698,10 +9771,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1] + keyColumns: 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2, 5] + valueColumns: 2:double, 5:double Statistics: Num rows: 26 Data size: 3562 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: double), _col2 
(type: double) Reducer 3 @@ -9709,7 +9782,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -9837,12 +9910,11 @@ STAGE PLANS: Map-reduce partition columns: 'Manufacturer#6' (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [10, 5] + keyColumns: 10:string, 5:int keyExpressions: ConstantVectorExpression(val Manufacturer#6) -> 10:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [11] - valueColumnNums: [] + partitionColumns: 11:string Statistics: Num rows: 5 Data size: 510 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: no inputs @@ -9866,7 +9938,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -9894,7 +9966,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: 'Manufacturer#6' raw input shape: window functions: @@ -10010,12 +10082,12 @@ STAGE PLANS: Map-reduce partition columns: 'Manufacturer#1' (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [10, 1] + keyColumns: 10:string, 1:string keyExpressions: ConstantVectorExpression(val Manufacturer#1) -> 10:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [11] - valueColumnNums: [7] + partitionColumns: 11:string + valueColumns: 7:double Statistics: Num rows: 5 Data size: 1135 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_retailprice (type: double) Execution mode: vectorized, llap @@ -10056,7 +10128,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: 'Manufacturer#1' raw input shape: window functions: @@ -10159,12 +10231,12 @@ STAGE PLANS: Map-reduce partition columns: 'm1' (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [10] + keyColumns: 10:string keyExpressions: ConstantVectorExpression(val m1) -> 10:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [11] - valueColumnNums: [5] + partitionColumns: 11:string + valueColumns: 5:int Statistics: Num rows: 5 Data size: 510 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: 
vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out index 5ea866b1f6..3c46272fc1 100644 --- ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out +++ ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out @@ -89,11 +89,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 7] + keyColumns: 2:string, 7:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 2860 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -134,7 +134,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col7 ASC NULLS FIRST + order by: _col7 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -282,11 +282,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 7] + keyColumns: 2:string, 7:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 2860 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -311,7 +311,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -339,7 +339,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col7 ASC NULLS FIRST + order by: _col7 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -476,11 +476,10 @@ STAGE PLANS: Map-reduce partition columns: t (type: tinyint) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 6, 7, 1, 4] + keyColumns: 0:tinyint, 6:boolean, 7:string, 1:smallint, 4:float native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [] + partitionColumns: 0:tinyint Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs @@ -520,7 +519,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col6 ASC NULLS FIRST, _col7 ASC NULLS FIRST, _col1 ASC NULLS FIRST, 
_col4 DESC NULLS LAST + order by: _col6 ASC NULLS LAST, _col7 ASC NULLS LAST, _col1 ASC NULLS LAST, _col4 DESC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -699,11 +698,10 @@ STAGE PLANS: Map-reduce partition columns: si (type: smallint) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 2, 7] + keyColumns: 1:smallint, 2:int, 7:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [1] - valueColumnNums: [] + partitionColumns: 1:smallint Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs @@ -743,7 +741,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST, _col7 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col7 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -922,11 +920,10 @@ STAGE PLANS: Map-reduce partition columns: b (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3, 1, 7, 5] + keyColumns: 3:bigint, 1:smallint, 7:string, 5:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [3] - valueColumnNums: [] + partitionColumns: 3:bigint Statistics: Num rows: 1 Data size: 204 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs @@ -966,7 +963,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST, _col7 ASC NULLS FIRST, _col5 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST, _col7 ASC NULLS LAST, _col5 ASC NULLS LAST partition by: _col3 raw input shape: window functions: @@ -1145,11 +1142,11 @@ STAGE PLANS: Map-reduce partition columns: f (type: float) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4, 3] + keyColumns: 4:float, 3:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [4] - valueColumnNums: [7] + partitionColumns: 4:float + valueColumns: 7:string Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE value expressions: s (type: string) Execution mode: vectorized, llap @@ -1190,7 +1187,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col3 ASC NULLS FIRST + order by: _col3 ASC NULLS LAST partition by: _col4 raw input shape: window functions: @@ -1369,10 +1366,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string), p_type (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [2, 4] + keyColumns: 2:string, 4:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS 
true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [7] + valueColumns: 7:double Statistics: Num rows: 26 Data size: 5460 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_retailprice (type: double) Execution mode: vectorized, llap @@ -1397,7 +1394,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: za reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -1425,7 +1422,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST partition by: _col2, _col4 raw input shape: window functions: @@ -1483,32 +1480,32 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@part #### A masked pattern was here #### p_mfgr avg_window_0 -Manufacturer#1 1753.76 -Manufacturer#3 1410.39 -Manufacturer#4 1620.67 -Manufacturer#5 1018.1 -Manufacturer#5 1788.73 -Manufacturer#1 1173.15 -Manufacturer#1 1173.15 -Manufacturer#2 1800.7 -Manufacturer#2 1690.68 -Manufacturer#3 1922.98 -Manufacturer#4 1844.92 -Manufacturer#4 1290.35 -Manufacturer#5 1789.69 +Manufacturer#1 1632.66 Manufacturer#1 1414.42 +Manufacturer#2 1800.7 Manufacturer#2 1701.6 Manufacturer#3 1190.27 Manufacturer#3 1337.29 -Manufacturer#4 1206.26 -Manufacturer#5 1611.66 -Manufacturer#1 1632.66 +Manufacturer#1 1173.15 +Manufacturer#1 1173.15 +Manufacturer#4 1290.35 +Manufacturer#5 1464.48 +Manufacturer#5 1789.69 +Manufacturer#1 1753.76 Manufacturer#1 1602.59 +Manufacturer#2 1690.68 Manufacturer#2 2031.98 -Manufacturer#2 1698.66 +Manufacturer#3 1410.39 Manufacturer#3 1671.68 Manufacturer#4 1375.42 -Manufacturer#5 1464.48 +Manufacturer#5 1788.73 +Manufacturer#2 1698.66 +Manufacturer#3 1922.98 +Manufacturer#4 1844.92 +Manufacturer#4 1620.67 +Manufacturer#4 1206.26 +Manufacturer#5 1018.1 +Manufacturer#5 1611.66 PREHOOK: query: explain vectorization detail select p_mfgr, avg(p_retailprice) over(partition by p_mfgr order by p_type,p_mfgr rows between unbounded preceding and current row) from part PREHOOK: type: QUERY @@ -1546,11 +1543,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 4] + keyColumns: 2:string, 4:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [7] + partitionColumns: 2:string + valueColumns: 7:double Statistics: Num rows: 26 Data size: 5460 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_retailprice (type: double) Execution mode: vectorized, llap @@ -1575,7 +1572,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: zz reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -1603,7 +1600,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col4 ASC NULLS FIRST, _col2 ASC NULLS FIRST + order by: _col4 ASC NULLS LAST, _col2 ASC 
NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1744,11 +1741,11 @@ STAGE PLANS: Map-reduce partition columns: ts (type: timestamp) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [8, 2] + keyColumns: 8:timestamp, 2:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [8] - valueColumnNums: [7] + partitionColumns: 8:timestamp + valueColumns: 7:string Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE value expressions: s (type: string) Execution mode: vectorized, llap @@ -1773,7 +1770,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -1801,7 +1798,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST partition by: _col8 raw input shape: window functions: @@ -1987,11 +1984,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 7] + keyColumns: 2:string, 7:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 2860 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -2032,7 +2029,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col7 ASC NULLS FIRST + order by: _col7 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -2200,32 +2197,32 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@part #### A masked pattern was here #### p_mfgr avg_window_0 -Manufacturer#1 1753.76 -Manufacturer#3 1410.39 -Manufacturer#4 1620.67 -Manufacturer#5 1018.1 -Manufacturer#5 1788.73 -Manufacturer#1 1173.15 -Manufacturer#1 1173.15 -Manufacturer#2 1800.7 -Manufacturer#2 1690.68 -Manufacturer#3 1922.98 -Manufacturer#4 1844.92 -Manufacturer#4 1290.35 -Manufacturer#5 1789.69 +Manufacturer#1 1632.66 Manufacturer#1 1414.42 +Manufacturer#2 1800.7 Manufacturer#2 1701.6 Manufacturer#3 1190.27 Manufacturer#3 1337.29 -Manufacturer#4 1206.26 -Manufacturer#5 1611.66 -Manufacturer#1 1632.66 +Manufacturer#1 1173.15 +Manufacturer#1 1173.15 +Manufacturer#4 1290.35 +Manufacturer#5 1464.48 +Manufacturer#5 1789.69 +Manufacturer#1 1753.76 Manufacturer#1 1602.59 +Manufacturer#2 1690.68 Manufacturer#2 2031.98 -Manufacturer#2 1698.66 +Manufacturer#3 1410.39 Manufacturer#3 1671.68 Manufacturer#4 1375.42 -Manufacturer#5 1464.48 +Manufacturer#5 1788.73 +Manufacturer#2 1698.66 +Manufacturer#3 1922.98 +Manufacturer#4 1844.92 +Manufacturer#4 1620.67 +Manufacturer#4 1206.26 +Manufacturer#5 1018.1 +Manufacturer#5 1611.66 PREHOOK: query: select p_mfgr, 
avg(p_retailprice) over(partition by p_mfgr order by p_type,p_mfgr rows between unbounded preceding and current row) from part PREHOOK: type: QUERY PREHOOK: Input: default@part diff --git ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out index d9b62a905b..729554f0f2 100644 --- ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out +++ ql/src/test/results/clientpositive/llap/vector_windowing_gby.q.out @@ -59,10 +59,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [1] + keyColumns: 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2, 4] + valueColumns: 2:int, 4:boolean Statistics: Num rows: 18 Data size: 1581 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: int), _col2 (type: boolean) Execution mode: vectorized, llap @@ -112,10 +112,10 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [6] + keyColumns: 6:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumns: 2:int Statistics: Num rows: 9174 Data size: 671296 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: int) Execution mode: vectorized, llap @@ -158,6 +158,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: boolean) Statistics: Num rows: 3 Data size: 60 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: bigint), _col2 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: @@ -202,12 +205,12 @@ STAGE PLANS: Map-reduce partition columns: 0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3, 6] + keyColumns: 3:int, 6:double keyExpressions: ConstantVectorExpression(val 0) -> 3:int, DoubleColDivideDoubleColumn(col 4:double, col 5:double)(children: CastLongToDouble(col 1:bigint) -> 4:double, CastLongToDouble(col 2:bigint) -> 5:double) -> 6:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [7] - valueColumnNums: [1, 2] + partitionColumns: 7:int + valueColumns: 1:bigint, 2:bigint Statistics: Num rows: 2 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: bigint), _col2 (type: bigint) Reducer 4 @@ -215,7 +218,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: 
false @@ -243,7 +246,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: (UDFToDouble(_col1) / UDFToDouble(_col2)) ASC NULLS FIRST + order by: (UDFToDouble(_col1) / UDFToDouble(_col2)) ASC NULLS LAST partition by: 0 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out index 55908e5f34..6660d737cd 100644 --- ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out +++ ql/src/test/results/clientpositive/llap/vector_windowing_gby2.q.out @@ -62,10 +62,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] + valueColumns: 1:bigint Statistics: Num rows: 7 Data size: 651 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: bigint) Execution mode: vectorized, llap @@ -129,19 +129,18 @@ STAGE PLANS: Map-reduce partition columns: 0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:int, 1:bigint keyExpressions: ConstantVectorExpression(val 0) -> 2:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [3] - valueColumnNums: [] + partitionColumns: 3:int Statistics: Num rows: 7 Data size: 651 Basic stats: COMPLETE Column stats: COMPLETE Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -169,7 +168,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: 0 raw input shape: window functions: @@ -233,11 +232,11 @@ POSTHOOK: Input: default@cbo_t3 #### A masked pattern was here #### return_rank 1 -2 -2 -2 -5 -5 +1 +1 +4 +4 +6 7 PREHOOK: query: explain vectorization detail select avg(cast(ws.key as int)) over (partition by min(ws.value) order by sum(ws.c_int)) as return_rank @@ -304,10 +303,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2] + valueColumns: 1:string, 2:bigint Statistics: Num rows: 6 Data size: 1176 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: string), _col2 (type: bigint) Execution mode: vectorized, llap @@ -363,11 +362,11 @@ STAGE 
PLANS: Map-reduce partition columns: _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 2] + keyColumns: 1:string, 2:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [1] - valueColumnNums: [0] + partitionColumns: 1:string + valueColumns: 0:int Statistics: Num rows: 6 Data size: 1176 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: int) Reducer 3 @@ -375,7 +374,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -403,7 +402,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -540,10 +539,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2, 3, 4, 5] + valueColumns: 2:double, 3:double, 4:int, 5:double Statistics: Num rows: 10 Data size: 1980 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: double), _col3 (type: double), _col4 (type: int), _col5 (type: double) Execution mode: vectorized, llap @@ -599,11 +598,11 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 2] + keyColumns: 0:string, 2:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [1, 3, 4, 5] + partitionColumns: 0:string + valueColumns: 1:string, 3:double, 4:int, 5:double Statistics: Num rows: 10 Data size: 1980 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: string), _col3 (type: double), _col4 (type: int), _col5 (type: double) Reducer 3 @@ -678,12 +677,12 @@ STAGE PLANS: Map-reduce partition columns: lower(_col1) (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [7, 3] + keyColumns: 7:string, 3:double keyExpressions: StringLower(col 2:string) -> 7:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [8] - valueColumnNums: [6, 2, 4, 5] + partitionColumns: 8:string + valueColumns: 6:int, 2:string, 4:int, 5:double Statistics: 
Num rows: 10 Data size: 1980 Basic stats: COMPLETE Column stats: COMPLETE value expressions: rank_window_0 (type: int), _col1 (type: string), _col4 (type: int), _col5 (type: double) Reducer 4 @@ -758,11 +757,11 @@ STAGE PLANS: Map-reduce partition columns: _col5 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4, 5] + keyColumns: 4:int, 5:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [4] - valueColumnNums: [6, 2] + partitionColumns: 4:int + valueColumns: 6:int, 2:int Statistics: Num rows: 10 Data size: 1005 Basic stats: COMPLETE Column stats: COMPLETE value expressions: dense_rank_window_1 (type: int), _col0 (type: int) Reducer 5 @@ -899,10 +898,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [1] + keyColumns: 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2, 4] + valueColumns: 2:int, 4:boolean Statistics: Num rows: 18 Data size: 1581 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: int), _col2 (type: boolean) Execution mode: vectorized, llap @@ -952,10 +951,10 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [6] + keyColumns: 6:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumns: 2:int Statistics: Num rows: 9174 Data size: 671296 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: int) Execution mode: vectorized, llap @@ -998,6 +997,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: boolean) Statistics: Num rows: 3 Data size: 60 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: bigint), _col2 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: @@ -1042,12 +1044,12 @@ STAGE PLANS: Map-reduce partition columns: 0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3, 6] + keyColumns: 3:int, 6:double keyExpressions: ConstantVectorExpression(val 0) -> 3:int, DoubleColDivideDoubleColumn(col 4:double, col 5:double)(children: CastLongToDouble(col 1:bigint) -> 4:double, CastLongToDouble(col 2:bigint) -> 5:double) -> 6:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [7] - valueColumnNums: [1, 2] + partitionColumns: 7:int + valueColumns: 1:bigint, 2:bigint Statistics: Num rows: 
2 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: bigint), _col2 (type: bigint) Reducer 4 @@ -1055,7 +1057,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -1083,7 +1085,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: (UDFToDouble(_col1) / UDFToDouble(_col2)) ASC NULLS FIRST + order by: (UDFToDouble(_col1) / UDFToDouble(_col2)) ASC NULLS LAST partition by: 0 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/llap/vector_windowing_multipartitioning.q.out ql/src/test/results/clientpositive/llap/vector_windowing_multipartitioning.q.out index 157d184a0f..937ca4f209 100644 --- ql/src/test/results/clientpositive/llap/vector_windowing_multipartitioning.q.out +++ ql/src/test/results/clientpositive/llap/vector_windowing_multipartitioning.q.out @@ -81,11 +81,11 @@ STAGE PLANS: Map-reduce partition columns: s (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [7, 1] + keyColumns: 7:string, 1:smallint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [7] - valueColumnNums: [3] + partitionColumns: 7:string + valueColumns: 3:bigint Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE value expressions: b (type: bigint) Execution mode: vectorized, llap @@ -110,7 +110,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -138,7 +138,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col7 raw input shape: window functions: @@ -10256,11 +10256,11 @@ STAGE PLANS: Map-reduce partition columns: s (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [7, 9] + keyColumns: 7:string, 9:decimal(4,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [7] - valueColumnNums: [3, 8] + partitionColumns: 7:string + valueColumns: 3:bigint, 8:timestamp Statistics: Num rows: 1 Data size: 344 Basic stats: COMPLETE Column stats: NONE value expressions: b (type: bigint), ts (type: timestamp) Execution mode: vectorized, llap @@ -10352,11 +10352,11 @@ STAGE PLANS: Map-reduce partition columns: _col7 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 3] + keyColumns: 0:string, 3:timestamp native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, 
No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [4, 2] + partitionColumns: 0:string + valueColumns: 4:int, 2:bigint Statistics: Num rows: 1 Data size: 344 Basic stats: COMPLETE Column stats: NONE value expressions: rank_window_0 (type: int), _col3 (type: bigint) Reducer 3 @@ -10534,10 +10534,10 @@ STAGE PLANS: Map-reduce partition columns: s (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [7] + keyColumns: 7:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2, 4] + valueColumns: 1:smallint, 2:int, 4:float Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE value expressions: si (type: smallint), i (type: int), f (type: float) Execution mode: vectorized, llap @@ -10627,10 +10627,10 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: smallint) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [1] + keyColumns: 1:smallint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [4, 3, 0] + valueColumns: 4:bigint, 3:float, 0:string Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE value expressions: sum_window_0 (type: bigint), _col4 (type: float), _col7 (type: string) Reducer 3 @@ -10801,11 +10801,11 @@ STAGE PLANS: Map-reduce partition columns: s (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [7, 6] + keyColumns: 7:string, 6:boolean native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [7] - valueColumnNums: [1, 10] + partitionColumns: 7:string + valueColumns: 1:smallint, 10:binary Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE value expressions: si (type: smallint), bin (type: binary) Execution mode: vectorized, llap @@ -10830,7 +10830,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: true usesVectorUDFAdaptor: false @@ -10858,7 +10858,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col6 ASC NULLS FIRST + order by: _col6 ASC NULLS LAST partition by: _col7 raw input shape: window functions: @@ -10897,11 +10897,11 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: smallint) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 3] + keyColumns: 2:smallint, 3:binary native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS 
true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [4, 0] + partitionColumns: 2:smallint + valueColumns: 4:int, 0:string Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: NONE value expressions: rank_window_0 (type: int), _col7 (type: string) Reducer 3 @@ -11074,10 +11074,10 @@ STAGE PLANS: Map-reduce partition columns: i (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [2] + keyColumns: 2:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [4, 7] + valueColumns: 4:float, 7:string Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE value expressions: f (type: float), s (type: string) Execution mode: vectorized, llap @@ -11167,12 +11167,12 @@ STAGE PLANS: Map-reduce partition columns: 0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [4, 1] + keyColumns: 4:int, 1:float keyExpressions: ConstantVectorExpression(val 0) -> 4:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [5] - valueColumnNums: [3, 2] + partitionColumns: 5:int + valueColumns: 3:double, 2:string Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE value expressions: sum_window_0 (type: double), _col7 (type: string) Reducer 3 @@ -11180,7 +11180,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -11208,7 +11208,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: 0 raw input shape: window functions: @@ -11354,11 +11354,11 @@ STAGE PLANS: Map-reduce partition columns: s (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [7, 9] + keyColumns: 7:string, 9:decimal(4,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [7] - valueColumnNums: [1, 4] + partitionColumns: 7:string + valueColumns: 1:smallint, 4:float Statistics: Num rows: 1 Data size: 304 Basic stats: COMPLETE Column stats: NONE value expressions: si (type: smallint), f (type: float) Execution mode: vectorized, llap @@ -11383,7 +11383,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: true usesVectorUDFAdaptor: false @@ -11411,7 +11411,7 @@ STAGE PLANS: 
Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col9 ASC NULLS FIRST + order by: _col9 ASC NULLS LAST partition by: _col7 raw input shape: window functions: @@ -11450,11 +11450,11 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: smallint) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 3] + keyColumns: 2:smallint, 3:float native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [4, 0] + partitionColumns: 2:smallint + valueColumns: 4:int, 0:string Statistics: Num rows: 1 Data size: 304 Basic stats: COMPLETE Column stats: NONE value expressions: rank_window_0 (type: int), _col7 (type: string) Reducer 3 @@ -11462,7 +11462,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -11490,7 +11490,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out index 1871216134..2d3ab96db3 100644 --- ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out +++ ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out @@ -89,11 +89,10 @@ STAGE PLANS: Map-reduce partition columns: 0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [3] + keyColumns: 3:int keyExpressions: ConstantVectorExpression(val 0) -> 3:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: no inputs @@ -233,11 +232,11 @@ STAGE PLANS: Map-reduce partition columns: d (type: double) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [5, 9] + keyColumns: 5:double, 9:decimal(4,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [5] - valueColumnNums: [7] + partitionColumns: 5:double + valueColumns: 7:string Statistics: Num rows: 1 Data size: 304 Basic stats: COMPLETE Column stats: NONE value expressions: s (type: string) Execution mode: vectorized, llap @@ -262,7 +261,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false 
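Note on the renamed EXPLAIN VECTORIZATION fields seen throughout these hunks: the old keyColumnNums/partitionColumnNums/valueColumnNums printed bare column indices (e.g. [2, 3]), while the new keyColumns/partitionColumns/valueColumns render each index together with its vector column type (e.g. 2:smallint, 3:float). A minimal sketch of that index-to-index:type rendering, using a hypothetical helper for illustration only — this is not the patch's actual implementation:

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class ColumnRenderSketch {
  // Old style: bare indices, e.g. "[2, 3]".
  static String renderNums(int[] nums) {
    return IntStream.of(nums).boxed().collect(Collectors.toList()).toString();
  }

  // New style: index:type pairs, e.g. "2:smallint, 3:float".
  // The type names passed in are assumptions for illustration.
  static String renderTyped(int[] nums, List<String> types) {
    return IntStream.range(0, nums.length)
        .mapToObj(i -> nums[i] + ":" + types.get(i))
        .collect(Collectors.joining(", "));
  }

  public static void main(String[] args) {
    int[] keys = {2, 3};
    System.out.println(renderNums(keys));                                // [2, 3]
    System.out.println(renderTyped(keys, List.of("smallint", "float"))); // 2:smallint, 3:float
  }
}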
usesVectorUDFAdaptor: false @@ -290,7 +289,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col9 ASC NULLS FIRST + order by: _col9 ASC NULLS LAST partition by: _col5 raw input shape: window functions: @@ -491,11 +490,11 @@ STAGE PLANS: Map-reduce partition columns: bin (type: binary) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [10, 5, 2] + keyColumns: 10:binary, 5:double, 2:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [10] - valueColumnNums: [7] + partitionColumns: 10:binary + valueColumns: 7:string Statistics: Num rows: 1 Data size: 340 Basic stats: COMPLETE Column stats: NONE value expressions: s (type: string) Execution mode: vectorized, llap @@ -536,7 +535,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST, _col2 DESC NULLS LAST + order by: _col5 ASC NULLS LAST, _col2 DESC NULLS LAST partition by: _col10 raw input shape: window functions: @@ -715,11 +714,10 @@ STAGE PLANS: Map-reduce partition columns: i (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 7, 9] + keyColumns: 2:int, 7:string, 9:decimal(4,2) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [] + partitionColumns: 2:int Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs @@ -759,7 +757,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col7 ASC NULLS FIRST, _col2 ASC NULLS FIRST, _col9 ASC NULLS FIRST + order by: _col7 ASC NULLS LAST, _col2 ASC NULLS LAST, _col9 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -938,11 +936,11 @@ STAGE PLANS: Map-reduce partition columns: d (type: double) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [5, 4] + keyColumns: 5:double, 4:float native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [5] - valueColumnNums: [0, 7] + partitionColumns: 5:double + valueColumns: 0:tinyint, 7:string Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE value expressions: t (type: tinyint), s (type: string) Execution mode: vectorized, llap @@ -967,7 +965,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -995,7 +993,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col4 ASC NULLS FIRST + order by: _col4 ASC NULLS LAST 
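The recurring plan change across these golden files is the flip from ASC NULLS FIRST to ASC NULLS LAST on window ORDER BY keys, which is also what reorders the query results in the diffs below: rows whose order key is NULL now surface at the end of each partition instead of the start. A self-contained sketch of the new ascending-nulls-last comparison semantics, using the stock JDK comparator purely as an analogy (it is not Hive's BinarySortableSerDe key encoding):

import java.util.Arrays;
import java.util.Comparator;

public class NullsLastSketch {
  public static void main(String[] args) {
    Integer[] orderKeys = {3, null, 1, null, 2};
    // ASC NULLS LAST: non-null keys ascend, NULL keys sink to the end,
    // matching plan lines such as "order by: _col1 ASC NULLS LAST".
    Arrays.sort(orderKeys, Comparator.nullsLast(Comparator.naturalOrder()));
    System.out.println(Arrays.toString(orderKeys)); // [1, 2, 3, null, null]
  }
}

This is also why the first_value/last_value expectations further down shift for the a=2 partition: with NULLS LAST the NULL rows of column b move from the head of the partition to its tail, so the values visible inside each window frame change accordingly.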
partition by: _col5 raw input shape: window functions: @@ -1196,11 +1194,10 @@ STAGE PLANS: Map-reduce partition columns: bo (type: boolean) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [6, 7] + keyColumns: 6:boolean, 7:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [6] - valueColumnNums: [] + partitionColumns: 6:boolean Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs @@ -1240,7 +1237,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col7 ASC NULLS FIRST + order by: _col7 ASC NULLS LAST partition by: _col6 raw input shape: window functions: @@ -1428,12 +1425,12 @@ STAGE PLANS: Map-reduce partition columns: UDFToByte(10) (type: tinyint) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [12, 7] + keyColumns: 12:tinyint, 7:string keyExpressions: ConstantVectorExpression(val 10) -> 12:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [13] - valueColumnNums: [2] + partitionColumns: 13:tinyint + valueColumns: 2:int Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE value expressions: i (type: int) Execution mode: vectorized, llap @@ -1458,7 +1455,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -1486,7 +1483,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col7 ASC NULLS FIRST + order by: _col7 ASC NULLS LAST partition by: UDFToByte(10) raw input shape: window functions: @@ -1622,11 +1619,10 @@ STAGE PLANS: Map-reduce partition columns: a (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:int, 1:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [] + partitionColumns: 0:int Statistics: Num rows: 15 Data size: 120 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs @@ -1666,7 +1662,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -1737,11 +1733,11 @@ a b first_value_window_0 first_value_window_1 first_value_window_2 first_value_w 1 NULL NULL NULL NULL NULL 1 NULL NULL NULL NULL NULL 1 NULL NULL NULL NULL NULL -2 NULL NULL NULL NULL NULL -2 NULL NULL 1 NULL 1 -2 1 NULL 1 NULL 1 -2 2 1 1 NULL 1 -2 3 2 2 NULL 1 +2 1 1 1 
1 1 +2 2 1 1 1 1 +2 3 2 2 1 1 +2 NULL 3 3 1 1 +2 NULL NULL NULL 1 1 3 1 1 1 1 1 3 2 1 1 1 1 3 3 2 2 1 1 @@ -1794,11 +1790,10 @@ STAGE PLANS: Map-reduce partition columns: a (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:int, 1:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [] + partitionColumns: 0:int Statistics: Num rows: 15 Data size: 120 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs @@ -1966,11 +1961,10 @@ STAGE PLANS: Map-reduce partition columns: a (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:int, 1:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [] + partitionColumns: 0:int Statistics: Num rows: 15 Data size: 120 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs @@ -2010,7 +2004,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -2081,11 +2075,11 @@ a b last_value_window_0 last_value_window_1 last_value_window_2 last_value_windo 1 NULL NULL NULL NULL NULL 1 NULL NULL NULL NULL NULL 1 NULL NULL NULL NULL NULL -2 NULL NULL NULL NULL NULL -2 NULL 1 1 1 1 2 1 2 2 2 2 2 2 3 3 3 3 -2 3 3 3 3 3 +2 3 NULL 3 NULL 3 +2 NULL NULL 3 NULL 3 +2 NULL NULL NULL NULL 3 3 1 2 2 2 2 3 2 3 3 3 3 3 3 4 4 4 4 @@ -2138,11 +2132,10 @@ STAGE PLANS: Map-reduce partition columns: a (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:int, 1:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [] + partitionColumns: 0:int Statistics: Num rows: 15 Data size: 120 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs diff --git ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out index 7b6fa66b06..73f64b89fc 100644 --- ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out +++ ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out @@ -89,11 +89,10 @@ STAGE PLANS: Map-reduce partition columns: i (type: int) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 7, 3] + keyColumns: 2:int, 7:string, 3:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, 
LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [] + partitionColumns: 2:int Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs @@ -117,7 +116,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aza + reduceColumnNullOrder: azz reduceColumnSortOrder: +++ allNative: false usesVectorUDFAdaptor: false @@ -145,7 +144,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col7 ASC NULLS LAST, _col3 ASC NULLS FIRST + order by: _col7 ASC NULLS LAST, _col3 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -256,11 +255,10 @@ STAGE PLANS: Map-reduce partition columns: d (type: double) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [5, 7, 4] + keyColumns: 5:double, 7:string, 4:float native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [5] - valueColumnNums: [] + partitionColumns: 5:double Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs @@ -284,7 +282,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aaa + reduceColumnNullOrder: aza reduceColumnSortOrder: ++- allNative: false usesVectorUDFAdaptor: false @@ -312,7 +310,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col7 ASC NULLS FIRST, _col4 DESC NULLS FIRST + order by: _col7 ASC NULLS LAST, _col4 DESC NULLS FIRST partition by: _col5 raw input shape: window functions: @@ -378,13 +376,13 @@ POSTHOOK: Input: default@over10k_n21 d s f sum_window_0 NULL alice ichabod NULL NULL NULL calvin miller NULL NULL -0.01 NULL NULL NULL -0.01 NULL NULL NULL 0.01 calvin miller 8.39 8.390000343322754 -0.02 NULL NULL NULL +0.01 NULL NULL 8.390000343322754 +0.01 NULL NULL 8.390000343322754 0.02 holly polk 5.29 5.289999961853027 0.02 wendy quirinius 25.5 30.789999961853027 0.02 yuri laertes 37.59 68.38000011444092 +0.02 NULL NULL 68.38000011444092 0.03 nick steinbeck 79.24 79.23999786376953 PREHOOK: query: explain vectorization detail select ts, s, f, sum(f) over (partition by ts order by f asc nulls first range between current row and unbounded following) from over10k_n21 limit 10 @@ -423,11 +421,11 @@ STAGE PLANS: Map-reduce partition columns: ts (type: timestamp) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [8, 4] + keyColumns: 8:timestamp, 4:float native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [8] - valueColumnNums: [7] + partitionColumns: 8:timestamp + valueColumns: 7:string Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE value expressions: s (type: string) 
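On the reduceColumnNullOrder strings changing in these hunks (aa to az, aza to azz, aaa to aza, and so on): the string appears to carry one character per reduce sort key, 'a' tracking NULLS FIRST and 'z' tracking NULLS LAST, paired position-by-position with reduceColumnSortOrder's '+'/'-' for ASC/DESC. A hedged decoder sketch follows; the mapping is inferred from the plans in this diff, not taken from a documented API:

public class NullOrderSketch {
  // Render e.g. nullOrder="aza", sortOrder="++-" as readable key specs.
  // The 'a'/'z' meaning is an inference from the surrounding plan output.
  static String decode(String nullOrder, String sortOrder) {
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < nullOrder.length(); i++) {
      if (i > 0) sb.append(", ");
      sb.append("key").append(i)
        .append(sortOrder.charAt(i) == '+' ? " ASC" : " DESC")
        .append(nullOrder.charAt(i) == 'z' ? " NULLS LAST" : " NULLS FIRST");
    }
    return sb.toString();
  }

  public static void main(String[] args) {
    // Mirrors a hunk above: nullOrder "aza" with sortOrder "++-",
    // i.e. partition key ASC NULLS FIRST, then ASC NULLS LAST, then DESC NULLS FIRST.
    System.out.println(decode("aza", "++-"));
    // -> key0 ASC NULLS FIRST, key1 ASC NULLS LAST, key2 DESC NULLS FIRST
  }
}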
Execution mode: vectorized, llap @@ -556,11 +554,10 @@ STAGE PLANS: Map-reduce partition columns: t (type: tinyint) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 7, 5] + keyColumns: 0:tinyint, 7:string, 5:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [] + partitionColumns: 0:tinyint Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs @@ -600,7 +597,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col7 ASC NULLS FIRST, _col5 DESC NULLS FIRST + order by: _col7 ASC NULLS LAST, _col5 DESC NULLS FIRST partition by: _col0 raw input shape: window functions: @@ -688,11 +685,11 @@ STAGE PLANS: Map-reduce partition columns: ts (type: timestamp) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [8, 7] + keyColumns: 8:timestamp, 7:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [8] - valueColumnNums: [2] + partitionColumns: 8:timestamp + valueColumns: 2:int Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE value expressions: i (type: int) Execution mode: vectorized, llap @@ -857,11 +854,11 @@ STAGE PLANS: Map-reduce partition columns: s (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [7, 2] + keyColumns: 7:string, 2:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [7] - valueColumnNums: [5] + partitionColumns: 7:string + valueColumns: 5:double Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE value expressions: d (type: double) Execution mode: vectorized, llap @@ -1021,11 +1018,11 @@ STAGE PLANS: Map-reduce partition columns: s (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [7, 2] + keyColumns: 7:string, 2:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [7] - valueColumnNums: [5] + partitionColumns: 7:string + valueColumns: 5:double Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE value expressions: d (type: double) Execution mode: vectorized, llap @@ -1185,11 +1182,11 @@ STAGE PLANS: Map-reduce partition columns: s (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [7, 2] + keyColumns: 7:string, 2:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] 
IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [7] - valueColumnNums: [5] + partitionColumns: 7:string + valueColumns: 5:double Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE value expressions: d (type: double) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vector_windowing_range_multiorder.q.out ql/src/test/results/clientpositive/llap/vector_windowing_range_multiorder.q.out index 782bd9be2f..1126717b64 100644 --- ql/src/test/results/clientpositive/llap/vector_windowing_range_multiorder.q.out +++ ql/src/test/results/clientpositive/llap/vector_windowing_range_multiorder.q.out @@ -81,11 +81,11 @@ STAGE PLANS: Map-reduce partition columns: si (type: smallint) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 2, 3] + keyColumns: 1:smallint, 2:int, 3:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [1] - valueColumnNums: [0] + partitionColumns: 1:smallint + valueColumns: 0:tinyint Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: NONE value expressions: t (type: tinyint) Execution mode: vectorized, llap @@ -110,7 +110,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aaa + reduceColumnNullOrder: azz reduceColumnSortOrder: +++ allNative: false usesVectorUDFAdaptor: false @@ -138,7 +138,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST, _col3 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col3 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -339,11 +339,10 @@ STAGE PLANS: Map-reduce partition columns: si (type: smallint), bo (type: boolean) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 6, 2, 4] + keyColumns: 1:smallint, 6:boolean, 2:int, 4:float native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [1, 6] - valueColumnNums: [] + partitionColumns: 1:smallint, 6:boolean Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized, llap LLAP IO: no inputs @@ -383,7 +382,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST, _col4 DESC NULLS LAST + order by: _col2 ASC NULLS LAST, _col4 DESC NULLS LAST partition by: _col1, _col6 raw input shape: window functions: @@ -561,11 +560,10 @@ STAGE PLANS: Map-reduce partition columns: si (type: smallint), bo (type: boolean) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 6, 2, 4] + keyColumns: 1:smallint, 6:boolean, 2:int, 4:float native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS 
true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [1, 6]
- valueColumnNums: []
+ partitionColumns: 1:smallint, 6:boolean
Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized, llap
LLAP IO: no inputs
@@ -605,7 +603,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col2 ASC NULLS FIRST, _col4 DESC NULLS LAST
+ order by: _col2 ASC NULLS LAST, _col4 DESC NULLS LAST
partition by: _col1, _col6
raw input shape:
window functions:
@@ -783,10 +781,10 @@ STAGE PLANS:
Map-reduce partition columns: s (type: string)
Reduce Sink Vectorization:
className: VectorReduceSinkStringOperator
- keyColumnNums: [7]
+ keyColumns: 7:string
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: [1, 2]
+ valueColumns: 1:smallint, 2:int
Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
value expressions: si (type: smallint), i (type: int)
Execution mode: vectorized, llap
@@ -10932,11 +10930,10 @@ STAGE PLANS:
Map-reduce partition columns: s (type: string)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [7, 1, 2]
+ keyColumns: 7:string, 1:smallint, 2:int
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [7]
- valueColumnNums: []
+ partitionColumns: 7:string
Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized, llap
LLAP IO: no inputs
@@ -10960,7 +10957,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aaa
+ reduceColumnNullOrder: azz
reduceColumnSortOrder: +++
allNative: false
usesVectorUDFAdaptor: false
@@ -10988,7 +10985,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col1 ASC NULLS FIRST, _col2 ASC NULLS FIRST
+ order by: _col1 ASC NULLS LAST, _col2 ASC NULLS LAST
partition by: _col7
raw input shape:
window functions:
@@ -11189,11 +11186,10 @@ STAGE PLANS:
Map-reduce partition columns: s (type: string)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [7, 1, 2]
+ keyColumns: 7:string, 1:smallint, 2:int
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [7]
- valueColumnNums: []
+ partitionColumns: 7:string
Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized, llap
LLAP IO: no inputs
@@ -11217,7 +11213,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aaa
+ reduceColumnNullOrder: azz
reduceColumnSortOrder: +++
allNative: false
usesVectorUDFAdaptor: false
@@ -11245,7 +11241,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col1 ASC NULLS FIRST, _col2 ASC NULLS FIRST
+ order by: _col1 ASC NULLS LAST, _col2 ASC NULLS LAST
partition by: _col7
raw input shape:
window functions:
@@ -11446,11 +11442,10 @@ STAGE PLANS:
Map-reduce partition columns: s (type: string)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [7, 1, 2]
+ keyColumns: 7:string, 1:smallint, 2:int
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [7]
- valueColumnNums: []
+ partitionColumns: 7:string
Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized, llap
LLAP IO: no inputs
@@ -11474,7 +11469,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aaz
+ reduceColumnNullOrder: azz
reduceColumnSortOrder: ++-
allNative: false
usesVectorUDFAdaptor: false
@@ -11502,7 +11497,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col1 ASC NULLS FIRST, _col2 DESC NULLS LAST
+ order by: _col1 ASC NULLS LAST, _col2 DESC NULLS LAST
partition by: _col7
raw input shape:
window functions:
@@ -11703,11 +11698,10 @@ STAGE PLANS:
Map-reduce partition columns: si (type: smallint), bo (type: boolean)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [1, 6, 2, 4]
+ keyColumns: 1:smallint, 6:boolean, 2:int, 4:float
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [1, 6]
- valueColumnNums: []
+ partitionColumns: 1:smallint, 6:boolean
Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized, llap
LLAP IO: no inputs
@@ -11731,7 +11725,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aaaz
+ reduceColumnNullOrder: aazz
reduceColumnSortOrder: +++-
allNative: false
usesVectorUDFAdaptor: false
@@ -11759,7 +11753,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col2 ASC NULLS FIRST, _col4 DESC NULLS LAST
+ order by: _col2 ASC NULLS LAST, _col4 DESC NULLS LAST
partition by: _col1, _col6
raw input shape:
window functions:
@@ -11960,11 +11954,10 @@ STAGE PLANS:
Map-reduce partition columns: i (type: int)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [2, 6, 3]
+ keyColumns: 2:int, 6:boolean, 3:bigint
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [2]
- valueColumnNums: []
+ partitionColumns: 2:int
Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized, llap
LLAP IO: no inputs
@@ -12183,12 +12176,12 @@ STAGE PLANS:
Map-reduce partition columns: i (type: int)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [2, 12]
+ keyColumns: 2:int, 12:char(12)
keyExpressions: CastStringGroupToChar(col 7:string, maxLength 12) -> 12:char(12)
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [2]
- valueColumnNums: [7]
+ partitionColumns: 2:int
+ valueColumns: 7:string
Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
value expressions: s (type: string)
Execution mode: vectorized, llap
@@ -12408,12 +12401,12 @@ STAGE PLANS:
Map-reduce partition columns: i (type: int)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [2, 12]
+ keyColumns: 2:int, 12:varchar(12)
keyExpressions: CastStringGroupToVarChar(col 7:string, maxLength 12) -> 12:varchar(12)
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [2]
- valueColumnNums: [7]
+ partitionColumns: 2:int
+ valueColumns: 7:string
Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
value expressions: s (type: string)
Execution mode: vectorized, llap
diff --git ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out
index d2670af530..a926bf9d47 100644
--- ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out
+++ ql/src/test/results/clientpositive/llap/vector_windowing_rank.q.out
@@ -81,11 +81,11 @@ STAGE PLANS:
Map-reduce partition columns: f (type: float)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [4, 0]
+ keyColumns: 4:float, 0:tinyint
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [4]
- valueColumnNums: [7]
+ partitionColumns: 4:float
+ valueColumns: 7:string
Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE Column stats: NONE
value expressions: s (type: string)
Execution mode: vectorized, llap
@@ -110,7 +110,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aa
+ reduceColumnNullOrder: az
reduceColumnSortOrder: ++
allNative: false
usesVectorUDFAdaptor: false
@@ -138,7 +138,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col0 ASC NULLS FIRST
+ order by: _col0 ASC NULLS LAST
partition by: _col4
raw input shape:
window functions:
@@ -340,11 +337,10 @@ STAGE PLANS:
Map-reduce partition columns: ts (type: timestamp)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [8, 2, 7]
+ keyColumns: 8:timestamp, 2:int, 7:string
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [8]
- valueColumnNums: []
+ partitionColumns: 8:timestamp
Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized, llap
LLAP IO: no inputs
@@ -368,7 +367,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aaz
+ reduceColumnNullOrder: azz
reduceColumnSortOrder: ++-
allNative: false
usesVectorUDFAdaptor: false
@@ -396,7 +395,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col2 ASC NULLS FIRST, _col7 DESC NULLS LAST
+ order by: _col2 ASC NULLS LAST, _col7 DESC NULLS LAST
partition by: _col8
raw input shape:
window functions:
@@ -598,11 +597,10 @@ STAGE PLANS:
Map-reduce partition columns: bo (type: boolean)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [6, 3, 7]
+ keyColumns: 6:boolean, 3:bigint, 7:string
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [6]
- valueColumnNums: []
+ partitionColumns: 6:boolean
Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized, llap
LLAP IO: no inputs
@@ -642,7 +640,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col3 ASC NULLS FIRST, _col7 ASC NULLS FIRST
+ order by: _col3 ASC NULLS LAST, _col7 ASC NULLS LAST
partition by: _col6
raw input shape:
window functions:
@@ -821,11 +819,11 @@ STAGE PLANS:
Map-reduce partition columns: dec (type: decimal(4,2))
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [9, 4]
+ keyColumns: 9:decimal(4,2), 4:float
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [9]
- valueColumnNums: [7]
+ partitionColumns: 9:decimal(4,2)
+ valueColumns: 7:string
Statistics: Num rows: 1 Data size: 300 Basic stats: COMPLETE Column stats: NONE
value expressions: s (type: string)
Execution mode: vectorized, llap
@@ -866,7 +864,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col4 ASC NULLS FIRST
+ order by: _col4 ASC NULLS LAST
partition by: _col9
raw input shape:
window functions:
@@ -1082,10 +1080,10 @@ STAGE PLANS:
Map-reduce partition columns: _col0 (type: bigint)
Reduce Sink Vectorization:
className: VectorReduceSinkLongOperator
- keyColumnNums: [3]
+ keyColumns: 3:bigint
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: [8, 9]
+ valueColumns: 8:timestamp, 9:decimal(4,2)
Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: timestamp), _col2 (type: decimal(4,2))
Execution mode: vectorized, llap
@@ -1135,10 +1133,9 @@ STAGE PLANS:
Map-reduce partition columns: _col0 (type: bigint)
Reduce Sink Vectorization:
className: VectorReduceSinkLongOperator
- keyColumnNums: [3]
+ keyColumns: 3:bigint
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: []
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized, llap
LLAP IO: no inputs
@@ -1173,12 +1170,15 @@ STAGE PLANS:
sort order: ++
Map-reduce partition columns: _col1 (type: timestamp)
Statistics: Num rows: 1 Data size: 176 Basic stats: COMPLETE Column stats: NONE
+ MergeJoin Vectorization:
+ enabled: false
+ enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
Reducer 3
Execution mode: vectorized, llap
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aa
+ reduceColumnNullOrder: az
reduceColumnSortOrder: ++
allNative: false
usesVectorUDFAdaptor: false
@@ -1206,7 +1206,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col2 ASC NULLS FIRST
+ order by: _col2 ASC NULLS LAST
partition by: _col1
raw input shape:
window functions:
@@ -1383,10 +1383,10 @@ STAGE PLANS:
Map-reduce partition columns: _col0 (type: bigint)
Reduce Sink Vectorization:
className: VectorReduceSinkLongOperator
- keyColumnNums: [3]
+ keyColumns: 3:bigint
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: [8, 9]
+ valueColumns: 8:timestamp, 9:decimal(4,2)
Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: timestamp), _col2 (type: decimal(4,2))
Execution mode: vectorized, llap
@@ -1436,10 +1436,9 @@ STAGE PLANS:
Map-reduce partition columns: _col0 (type: bigint)
Reduce Sink Vectorization:
className: VectorReduceSinkLongOperator
- keyColumnNums: [3]
+ keyColumns: 3:bigint
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: []
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized, llap
LLAP IO: no inputs
@@ -1475,6 +1474,9 @@ STAGE PLANS:
Map-reduce partition columns: _col1 (type: timestamp)
Statistics: Num rows: 1 Data size: 176 Basic stats: COMPLETE Column stats: NONE
value expressions: _col2 (type: decimal(4,2))
+ MergeJoin Vectorization:
+ enabled: false
+ enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
Reducer 3
Execution mode: vectorized, llap
Reduce Vectorization:
@@ -1686,10 +1688,10 @@ STAGE PLANS:
Map-reduce partition columns: _col1 (type: bigint)
Reduce Sink Vectorization:
className: VectorReduceSinkLongOperator
- keyColumnNums: [3]
+ keyColumns: 3:bigint
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: [8, 9]
+ valueColumns: 8:timestamp, 9:decimal(4,2)
Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
value expressions: _col2 (type: timestamp), _col3 (type: decimal(4,2))
Execution mode: vectorized, llap
@@ -1739,10 +1741,9 @@ STAGE PLANS:
Map-reduce partition columns: _col0 (type: bigint)
Reduce Sink Vectorization:
className: VectorReduceSinkLongOperator
- keyColumnNums: [3]
+ keyColumns: 3:bigint
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: []
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized, llap
LLAP IO: no inputs
@@ -1777,12 +1778,15 @@ STAGE PLANS:
sort order: ++
Map-reduce partition columns: _col2 (type: timestamp)
Statistics: Num rows: 1 Data size: 180 Basic stats: COMPLETE Column stats: NONE
+ MergeJoin Vectorization:
+ enabled: false
+ enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
Reducer 3
Execution mode: vectorized, llap
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aa
+ reduceColumnNullOrder: az
reduceColumnSortOrder: ++
allNative: false
usesVectorUDFAdaptor: false
@@ -1810,7 +1814,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col3 ASC NULLS FIRST
+ order by: _col3 ASC NULLS LAST
partition by: _col2
raw input shape:
window functions:
diff --git ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out
index cf8f348148..ea2544fbe2 100644
--- ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out
+++ ql/src/test/results/clientpositive/llap/vector_windowing_streaming.q.out
@@ -83,11 +83,10 @@ STAGE PLANS:
Map-reduce partition columns: p_mfgr (type: string)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [2, 1]
+ keyColumns: 2:string, 1:string
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [2]
- valueColumnNums: []
+ partitionColumns: 2:string
Statistics: Num rows: 26 Data size: 5694 Basic stats: COMPLETE Column stats: COMPLETE
Execution mode: vectorized, llap
LLAP IO: no inputs
@@ -111,7 +110,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aa
+ reduceColumnNullOrder: az
reduceColumnSortOrder: ++
allNative: false
usesVectorUDFAdaptor: false
@@ -139,7 +138,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col1 ASC NULLS FIRST
+ order by: _col1 ASC NULLS LAST
partition by: _col2
raw input shape:
window functions:
@@ -257,7 +256,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aa
+ reduceColumnNullOrder: az
reduceColumnSortOrder: ++
allNative: false
usesVectorUDFAdaptor: false
@@ -285,7 +284,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col1 ASC NULLS FIRST
+ order by: _col1 ASC NULLS LAST
partition by: _col2
raw input shape:
window functions:
@@ -465,7 +464,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aa
+ reduceColumnNullOrder: az
reduceColumnSortOrder: ++
allNative: false
usesVectorUDFAdaptor: false
@@ -493,7 +492,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col4 ASC NULLS FIRST
+ order by: _col4 ASC NULLS LAST
partition by: _col0
raw input shape:
window functions:
@@ -689,7 +688,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col5 ASC NULLS FIRST
+ order by: _col5 ASC NULLS LAST
partition by: _col0
raw input shape:
window functions:
@@ -753,48 +752,10 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@sb
#### A masked pattern was here ####
sb.ctinyint sb.cdouble sb.r
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
+NULL -16379.0 1
+NULL -16310.0 2
+NULL -16309.0 3
+NULL -16307.0 4
PREHOOK: query: drop table if exists sD
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table if exists sD
@@ -868,7 +829,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aa
+ reduceColumnNullOrder: az
reduceColumnSortOrder: ++
allNative: false
usesVectorUDFAdaptor: false
@@ -896,7 +857,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col5 ASC NULLS FIRST
+ order by: _col5 ASC NULLS LAST
partition by: _col0
raw input shape:
window functions:
@@ -998,45 +959,7 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@sd
#### A masked pattern was here ####
sd.ctinyint sd.cdouble sd.r
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
-NULL NULL 1
+NULL -16379.0 1
+NULL -16310.0 2
+NULL -16309.0 3
+NULL -16307.0 4
diff --git ql/src/test/results/clientpositive/llap/vector_windowing_windowspec.q.out ql/src/test/results/clientpositive/llap/vector_windowing_windowspec.q.out
index 584453c93e..cdb90ce155 100644
--- ql/src/test/results/clientpositive/llap/vector_windowing_windowspec.q.out
+++ ql/src/test/results/clientpositive/llap/vector_windowing_windowspec.q.out
@@ -81,11 +81,10 @@ STAGE PLANS:
Map-reduce partition columns: i (type: int)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [2, 7, 3]
+ keyColumns: 2:int, 7:string, 3:bigint
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [2]
- valueColumnNums: []
+ partitionColumns: 2:int
Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized, llap
LLAP IO: no inputs
@@ -109,7 +108,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aaa
+ reduceColumnNullOrder: azz
reduceColumnSortOrder: +++
allNative: false
usesVectorUDFAdaptor: false
@@ -137,7 +136,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col7 ASC NULLS FIRST, _col3 ASC NULLS FIRST
+ order by: _col7 ASC NULLS LAST, _col3 ASC NULLS LAST
partition by: _col2
raw input shape:
window functions:
@@ -338,11 +337,10 @@ STAGE PLANS:
Map-reduce partition columns: d (type: double)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [5, 7, 4]
+ keyColumns: 5:double, 7:string, 4:float
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [5]
- valueColumnNums: []
+ partitionColumns: 5:double
Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized, llap
LLAP IO: no inputs
@@ -366,7 +364,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aaa
+ reduceColumnNullOrder: azz
reduceColumnSortOrder: +++
allNative: false
usesVectorUDFAdaptor: false
@@ -394,7 +392,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col7 ASC NULLS FIRST, _col4 ASC NULLS FIRST
+ order by: _col7 ASC NULLS LAST, _col4 ASC NULLS LAST
partition by: _col5
raw input shape:
window functions:
@@ -595,11 +593,11 @@ STAGE PLANS:
Map-reduce partition columns: ts (type: timestamp)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [8, 4]
+ keyColumns: 8:timestamp, 4:float
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [8]
- valueColumnNums: [7]
+ partitionColumns: 8:timestamp
+ valueColumns: 7:string
Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE
value expressions: s (type: string)
Execution mode: vectorized, llap
@@ -640,7 +638,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col4 ASC NULLS FIRST
+ order by: _col4 ASC NULLS LAST
partition by: _col8
raw input shape:
window functions:
@@ -818,11 +816,10 @@ STAGE PLANS:
Map-reduce partition columns: ts (type: timestamp)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [8, 7, 4]
+ keyColumns: 8:timestamp, 7:string, 4:float
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [8]
- valueColumnNums: []
+ partitionColumns: 8:timestamp
Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized, llap
LLAP IO: no inputs
@@ -862,7 +859,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col7 ASC NULLS FIRST, _col4 ASC NULLS FIRST
+ order by: _col7 ASC NULLS LAST, _col4 ASC NULLS LAST
partition by: _col8
raw input shape:
window functions:
@@ -1040,11 +1037,10 @@ STAGE PLANS:
Map-reduce partition columns: t (type: tinyint)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [0, 7, 5]
+ keyColumns: 0:tinyint, 7:string, 5:double
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [0]
- valueColumnNums: []
+ partitionColumns: 0:tinyint
Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized, llap
LLAP IO: no inputs
@@ -1084,7 +1080,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col7 ASC NULLS FIRST, _col5 DESC NULLS LAST
+ order by: _col7 ASC NULLS LAST, _col5 DESC NULLS LAST
partition by: _col0
raw input shape:
window functions:
@@ -1262,11 +1258,11 @@ STAGE PLANS:
Map-reduce partition columns: ts (type: timestamp)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [8, 7]
+ keyColumns: 8:timestamp, 7:string
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [8]
- valueColumnNums: [2]
+ partitionColumns: 8:timestamp
+ valueColumns: 2:int
Statistics: Num rows: 1 Data size: 228 Basic stats: COMPLETE Column stats: NONE
value expressions: i (type: int)
Execution mode: vectorized, llap
@@ -1291,7 +1287,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aa
+ reduceColumnNullOrder: az
reduceColumnSortOrder: ++
allNative: false
usesVectorUDFAdaptor: false
@@ -1319,7 +1315,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col7 ASC NULLS FIRST
+ order by: _col7 ASC NULLS LAST
partition by: _col8
raw input shape:
window functions:
@@ -1520,11 +1516,10 @@ STAGE PLANS:
Map-reduce partition columns: ts (type: timestamp)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [8, 4]
+ keyColumns: 8:timestamp, 4:float
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [8]
- valueColumnNums: []
+ partitionColumns: 8:timestamp
Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized, llap
LLAP IO: no inputs
@@ -1548,7 +1543,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aa
+ reduceColumnNullOrder: az
reduceColumnSortOrder: ++
allNative: false
usesVectorUDFAdaptor: false
@@ -1576,7 +1571,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col4 ASC NULLS FIRST
+ order by: _col4 ASC NULLS LAST
partition by: _col8
raw input shape:
window functions:
@@ -1777,11 +1772,10 @@ STAGE PLANS:
Map-reduce partition columns: ts (type: timestamp)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [8, 4]
+ keyColumns: 8:timestamp, 4:float
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [8]
- valueColumnNums: []
+ partitionColumns: 8:timestamp
Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized, llap
LLAP IO: no inputs
@@ -1821,7 +1815,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col4 ASC NULLS FIRST
+ order by: _col4 ASC NULLS LAST
partition by: _col8
raw input shape:
window functions:
@@ -1999,11 +1993,11 @@ STAGE PLANS:
Map-reduce partition columns: s (type: string)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [7, 2]
+ keyColumns: 7:string, 2:int
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [7]
- valueColumnNums: [5]
+ partitionColumns: 7:string
+ valueColumns: 5:double
Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE
value expressions: d (type: double)
Execution mode: vectorized, llap
@@ -2028,7 +2022,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aa
+ reduceColumnNullOrder: az
reduceColumnSortOrder: ++
allNative: false
usesVectorUDFAdaptor: false
@@ -2056,7 +2050,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col2 ASC NULLS FIRST
+ order by: _col2 ASC NULLS LAST
partition by: _col7
raw input shape:
window functions:
@@ -2165,11 +2159,11 @@ STAGE PLANS:
Map-reduce partition columns: s (type: string)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [7, 2]
+ keyColumns: 7:string, 2:int
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [7]
- valueColumnNums: [5]
+ partitionColumns: 7:string
+ valueColumns: 5:double
Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE
value expressions: d (type: double)
Execution mode: vectorized, llap
@@ -2194,7 +2188,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aa
+ reduceColumnNullOrder: az
reduceColumnSortOrder: ++
allNative: false
usesVectorUDFAdaptor: false
@@ -2222,7 +2216,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col2 ASC NULLS FIRST
+ order by: _col2 ASC NULLS LAST
partition by: _col7
raw input shape:
window functions:
@@ -2331,11 +2325,11 @@ STAGE PLANS:
Map-reduce partition columns: s (type: string)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [7, 2]
+ keyColumns: 7:string, 2:int
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [7]
- valueColumnNums: [5]
+ partitionColumns: 7:string
+ valueColumns: 5:double
Statistics: Num rows: 1 Data size: 196 Basic stats: COMPLETE Column stats: NONE
value expressions: d (type: double)
Execution mode: vectorized, llap
@@ -2360,7 +2354,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aa
+ reduceColumnNullOrder: az
reduceColumnSortOrder: ++
allNative: false
usesVectorUDFAdaptor: false
@@ -2388,7 +2382,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col2 ASC NULLS FIRST
+ order by: _col2 ASC NULLS LAST
partition by: _col7
raw input shape:
window functions:
diff --git ql/src/test/results/clientpositive/llap/vector_windowing_windowspec4.q.out ql/src/test/results/clientpositive/llap/vector_windowing_windowspec4.q.out
index 78df4409f3..e924032099 100644
--- ql/src/test/results/clientpositive/llap/vector_windowing_windowspec4.q.out
+++ ql/src/test/results/clientpositive/llap/vector_windowing_windowspec4.q.out
@@ -80,11 +80,10 @@ STAGE PLANS:
Map-reduce partition columns: type (type: string)
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [1, 0]
+ keyColumns: 1:string, 0:int
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [1]
- valueColumnNums: []
+ partitionColumns: 1:string
Statistics: Num rows: 3 Data size: 267 Basic stats: COMPLETE Column stats: COMPLETE
Execution mode: vectorized, llap
LLAP IO: no inputs
@@ -124,7 +123,7 @@ STAGE PLANS:
Windowing table definition
input alias: ptf_1
name: windowingtablefunction
- order by: _col0 ASC NULLS FIRST
+ order by: _col0 ASC NULLS LAST
partition by: _col1
raw input shape:
window functions:
diff --git ql/src/test/results/clientpositive/llap/vectorization_0.q.out ql/src/test/results/clientpositive/llap/vectorization_0.q.out
index c7100969ad..5e95f39213 100644
--- ql/src/test/results/clientpositive/llap/vectorization_0.q.out
+++ ql/src/test/results/clientpositive/llap/vectorization_0.q.out
@@ -63,10 +63,9 @@ STAGE PLANS:
sort order:
Reduce Sink Vectorization:
className: VectorReduceSinkEmptyKeyOperator
- keyColumnNums: []
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: [0, 1, 2, 3]
+ valueColumns: 0:tinyint, 1:tinyint, 2:bigint, 3:bigint
Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col0 (type: tinyint), _col1 (type: tinyint), _col2 (type: bigint), _col3 (type: bigint)
Execution mode: vectorized, llap
@@ -119,10 +118,10 @@ STAGE PLANS:
sort order: +
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [0]
+ keyColumns: 0:tinyint
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: [1, 2, 3]
+ valueColumns: 1:tinyint, 2:bigint, 3:bigint
Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col1 (type: tinyint), _col2 (type: bigint), _col3 (type: bigint)
Reducer 3
@@ -130,7 +129,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: a
+ reduceColumnNullOrder: z
reduceColumnSortOrder: +
allNative: false
usesVectorUDFAdaptor: false
@@ -244,10 +243,9 @@ STAGE PLANS:
sort order:
Reduce Sink Vectorization:
className: VectorReduceSinkEmptyKeyOperator
- keyColumnNums: []
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: [0]
+ valueColumns: 0:bigint
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col0 (type: bigint)
Execution mode: vectorized, llap
@@ -300,17 +298,16 @@ STAGE PLANS:
sort order: +
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [0]
+ keyColumns: 0:bigint
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: []
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
Reducer 3
Execution mode: vectorized, llap
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: a
+ reduceColumnNullOrder: z
reduceColumnSortOrder: +
allNative: false
usesVectorUDFAdaptor: false
@@ -575,10 +572,9 @@ STAGE PLANS:
sort order:
Reduce Sink Vectorization:
className: VectorReduceSinkEmptyKeyOperator
- keyColumnNums: []
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: [0, 1, 2, 3]
+ valueColumns: 0:bigint, 1:bigint, 2:bigint, 3:bigint
Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: bigint), _col3 (type: bigint)
Execution mode: vectorized, llap
@@ -631,10 +627,10 @@ STAGE PLANS:
sort order: +
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [0]
+ keyColumns: 0:bigint
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: [1, 2, 3]
+ valueColumns: 1:bigint, 2:bigint, 3:bigint
Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col1 (type: bigint), _col2 (type: bigint), _col3 (type: bigint)
Reducer 3
@@ -642,7 +638,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: a
+ reduceColumnNullOrder: z
reduceColumnSortOrder: +
allNative: false
usesVectorUDFAdaptor: false
@@ -756,10 +752,9 @@ STAGE PLANS:
sort order:
Reduce Sink Vectorization:
className: VectorReduceSinkEmptyKeyOperator
- keyColumnNums: []
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: [0]
+ valueColumns: 0:bigint
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col0 (type: bigint)
Execution mode: vectorized, llap
@@ -812,17 +807,16 @@ STAGE PLANS:
sort order: +
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [0]
+ keyColumns: 0:bigint
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: []
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
Reducer 3
Execution mode: vectorized, llap
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: a
+ reduceColumnNullOrder: z
reduceColumnSortOrder: +
allNative: false
usesVectorUDFAdaptor: false
@@ -1087,10 +1081,9 @@ STAGE PLANS:
sort order:
Reduce Sink Vectorization:
className: VectorReduceSinkEmptyKeyOperator
- keyColumnNums: []
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: [0, 1, 2, 3]
+ valueColumns: 0:float, 1:float, 2:bigint, 3:bigint
Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col0 (type: float), _col1 (type: float), _col2 (type: bigint), _col3 (type: bigint)
Execution mode: vectorized, llap
@@ -1143,10 +1136,10 @@ STAGE PLANS:
sort order: +
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [0]
+ keyColumns: 0:float
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: [1, 2, 3]
+ valueColumns: 1:float, 2:bigint, 3:bigint
Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col1 (type: float), _col2 (type: bigint), _col3 (type: bigint)
Reducer 3
@@ -1154,7 +1147,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: a
+ reduceColumnNullOrder: z
reduceColumnSortOrder: +
allNative: false
usesVectorUDFAdaptor: false
@@ -1268,10 +1261,9 @@ STAGE PLANS:
sort order:
Reduce Sink Vectorization:
className: VectorReduceSinkEmptyKeyOperator
- keyColumnNums: []
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: [0]
+ valueColumns: 0:double
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col0 (type: double)
Execution mode: vectorized, llap
@@ -1324,17 +1316,16 @@ STAGE PLANS:
sort order: +
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [0]
+ keyColumns: 0:double
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: []
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
Reducer 3
Execution mode: vectorized, llap
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: a
+ reduceColumnNullOrder: z
reduceColumnSortOrder: +
allNative: false
usesVectorUDFAdaptor: false
@@ -1646,10 +1637,9 @@ STAGE PLANS:
sort order:
Reduce Sink Vectorization:
className: VectorReduceSinkEmptyKeyOperator
- keyColumnNums: []
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: [0, 1, 2, 3, 4, 5, 6]
+ valueColumns: 0:bigint, 1:bigint, 2:double, 3:double, 4:bigint, 5:double, 6:tinyint
Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: double), _col3 (type: double), _col4 (type: bigint), _col5 (type: double), _col6 (type: tinyint)
Execution mode: vectorized, llap
@@ -31077,7 +31067,7 @@ STAGE PLANS:
Statistics: Num rows: 300 Data size: 23550 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col1 (type: string)
- null sort order: a
+ null sort order: z
sort order: +
Statistics: Num rows: 300 Data size: 23550 Basic stats: COMPLETE Column stats: COMPLETE
tag: -1
diff --git ql/src/test/results/clientpositive/llap/vectorization_1.q.out ql/src/test/results/clientpositive/llap/vectorization_1.q.out
index d2fcbc250a..a0b9d237de 100644
--- ql/src/test/results/clientpositive/llap/vectorization_1.q.out
+++ ql/src/test/results/clientpositive/llap/vectorization_1.q.out
@@ -97,10 +97,9 @@ STAGE PLANS:
sort order:
Reduce Sink Vectorization:
className: VectorReduceSinkEmptyKeyOperator
- keyColumnNums: []
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
+ valueColumns: 0:double, 1:double, 2:bigint, 3:double, 4:tinyint, 5:int, 6:double, 7:double, 8:bigint, 9:bigint
Statistics: Num rows: 1 Data size: 72 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col0 (type: double), _col1 (type: double), _col2 (type: bigint), _col3 (type: double), _col4 (type: tinyint), _col5 (type: int), _col6 (type: double), _col7 (type: double), _col8 (type: bigint), _col9 (type: bigint)
Execution mode: vectorized, llap
diff --git ql/src/test/results/clientpositive/llap/vectorization_12.q.out ql/src/test/results/clientpositive/llap/vectorization_12.q.out
index b90a7fe0be..61ff039ffb 100644
--- ql/src/test/results/clientpositive/llap/vectorization_12.q.out
+++ ql/src/test/results/clientpositive/llap/vectorization_12.q.out
@@ -124,10 +124,10 @@ STAGE PLANS:
Map-reduce partition columns: _col0 (type: double), _col1 (type: bigint), _col2 (type: string), _col3 (type: boolean)
Reduce Sink Vectorization:
className: VectorReduceSinkMultiKeyOperator
- keyColumnNums: [0, 1, 2, 3]
+ keyColumns: 0:double, 1:bigint, 2:string, 3:boolean
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: [4, 5, 6, 7, 8, 9, 10]
+ valueColumns: 4:bigint, 5:double, 6:double, 7:double, 8:bigint, 9:bigint, 10:double
Statistics: Num rows: 1 Data size: 170 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col4 (type: bigint), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: bigint), _col9 (type: bigint), _col10 (type: double)
Execution mode: vectorized, llap
@@ -191,10 +191,10 @@ STAGE PLANS:
sort order: +++
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [0, 1, 2]
+ keyColumns: 0:double, 1:bigint, 2:string
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: [3, 11, 12, 4, 13, 14, 19, 15, 20, 22, 24, 9, 26, 25, 21, 27]
+ valueColumns: 3:boolean, 11:double, 12:bigint, 4:bigint, 13:bigint, 14:double, 19:double, 15:double, 20:double, 22:double, 24:decimal(22,2), 9:bigint, 26:double, 25:double, 21:double, 27:double
Statistics: Num rows: 1 Data size: 346 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col1 (type: boolean), _col4 (type: double), _col5 (type: bigint), _col6 (type: bigint), _col7 (type: bigint), _col8 (type: double), _col9 (type: double), _col10 (type: double), _col11 (type: double), _col12 (type: double), _col13 (type: decimal(22,2)), _col14 (type: bigint), _col15 (type: double), _col17 (type: double), _col18 (type: double), _col19 (type: double)
Reducer 3
@@ -202,7 +202,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aaa
+ reduceColumnNullOrder: zzz
reduceColumnSortOrder: +++
allNative: false
usesVectorUDFAdaptor: false
diff --git ql/src/test/results/clientpositive/llap/vectorization_13.q.out ql/src/test/results/clientpositive/llap/vectorization_13.q.out
index efcf29261c..398cb56915 100644
--- ql/src/test/results/clientpositive/llap/vectorization_13.q.out
+++ ql/src/test/results/clientpositive/llap/vectorization_13.q.out
@@ -126,10 +126,10 @@ STAGE PLANS:
Map-reduce partition columns: _col0 (type: boolean), _col1 (type: tinyint), _col2 (type: timestamp), _col3 (type: float), _col4 (type: string)
Reduce Sink Vectorization:
className: VectorReduceSinkMultiKeyOperator
- keyColumnNums: [0, 1, 2, 3, 4]
+ keyColumns: 0:boolean, 1:tinyint, 2:timestamp, 3:float, 4:string
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: [5, 6, 7, 8, 9, 10, 11, 12, 13, 14]
+ valueColumns: 5:tinyint, 6:double, 7:double, 8:double, 9:bigint, 10:double, 11:double, 12:bigint, 13:float, 14:tinyint
Statistics: Num rows: 2730 Data size: 510974 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col5 (type: tinyint), _col6 (type: double), _col7 (type: double), _col8 (type: double), _col9 (type: bigint), _col10 (type: double), _col11 (type: double), _col12 (type: bigint), _col13 (type: float), _col14 (type: tinyint)
Execution mode: vectorized, llap
@@ -193,10 +193,9 @@ STAGE PLANS:
sort order: +++++++++++++++++++++
Reduce Sink Vectorization:
className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [0, 1, 2, 3, 4, 15, 5, 17, 6, 20, 19, 21, 22, 23, 24, 27, 28, 25, 13, 31, 14]
+ keyColumns: 0:boolean, 1:tinyint, 2:timestamp, 3:float, 4:string, 15:tinyint, 5:tinyint, 17:tinyint, 6:double, 20:double, 19:double, 21:float, 22:double, 23:double, 24:double, 27:decimal(7,3), 28:double, 25:double, 13:float, 31:double, 14:tinyint
native: true
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: []
Statistics: Num rows: 1365 Data size: 446640 Basic stats: COMPLETE Column stats: COMPLETE
TopN Hash Memory Usage: 0.1
Reducer 3
@@ -204,7 +203,7 @@ STAGE PLANS:
Reduce Vectorization:
enabled: true
enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aaaaaaaaaaaaaaaaaaaaa
+ reduceColumnNullOrder: zzzzzzzzzzzzzzzzzzzzz
reduceColumnSortOrder: +++++++++++++++++++++
allNative: false
usesVectorUDFAdaptor: false
@@ -314,46 +313,46 @@ LIMIT 40
POSTHOOK: type: QUERY
POSTHOOK: Input: default@alltypesorc
#### A masked pattern was here ####
-NULL -55 1969-12-31 16:00:11.38 -55.0 NULL 55 -55 0 -55.0 -0.0 55.0 -4375.415 0.0 55.0 0.0 -10.175 -55.0 0.47781818181818186 -55.0 0.0 -55
-NULL -55 1969-12-31 16:00:11.751 -55.0 NULL 55 -55 0 -55.0 -0.0 55.0 -4375.415 0.0 55.0 0.0 -10.175 -55.0 0.47781818181818186 -55.0 0.0 -55
-NULL -56 1969-12-31 16:00:13.602 -56.0 NULL 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56
-NULL -56 1969-12-31 16:00:13.958 -56.0 NULL 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56
-NULL -56 1969-12-31 16:00:15.038 -56.0 NULL 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56
-NULL -57 1969-12-31 16:00:11.451 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57
-NULL -57 1969-12-31 16:00:11.883 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57
-NULL -57 1969-12-31 16:00:12.626 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57
-NULL -57 1969-12-31 16:00:13.578 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57
-NULL -57 1969-12-31 16:00:15.39 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57
-NULL -58 1969-12-31 16:00:12.065 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58
-NULL -58 1969-12-31 16:00:12.683 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58
-NULL -58 1969-12-31 16:00:12.948 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58
-NULL -58 1969-12-31 16:00:14.066 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58
-NULL -58 1969-12-31 16:00:15.658 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58
-NULL -59 1969-12-31 16:00:12.008 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59
-NULL -59 1969-12-31 16:00:13.15 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59
-NULL -59 1969-12-31 16:00:13.625 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59
-NULL -59 1969-12-31 16:00:15.296 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59
-NULL -59 1969-12-31 16:00:15.861 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59
-NULL -60 1969-12-31 16:00:11.504 -60.0 NULL 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60
-NULL -60 1969-12-31 16:00:11.641 -60.0 NULL 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60
-NULL -60 1969-12-31 16:00:11.996 -60.0 NULL 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60
-NULL -60 1969-12-31 16:00:12.779 -60.0 NULL 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60
-NULL -61 1969-12-31 16:00:11.842 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61
-NULL -61 1969-12-31 16:00:12.454 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61
-NULL -61 1969-12-31 16:00:14.192 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61
-NULL -61 1969-12-31 16:00:16.558 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61
-NULL -62 1969-12-31 16:00:12.388 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:12.591 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:14.154 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:14.247 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:14.517 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:14.965 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -63 1969-12-31 16:00:11.946 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63
-NULL -63 1969-12-31 16:00:12.188 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63
-NULL -63 1969-12-31 16:00:15.436 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63
-NULL -64 1969-12-31 16:00:11.912 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64
-NULL -64 1969-12-31 16:00:12.339 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64
-NULL -64 1969-12-31 16:00:13.274 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64
+true -55 1969-12-31 16:00:12.297 -55.0 1cGVWH7n1QU 55 -55 0 -55.0 -0.0 55.0 -4375.415 0.0 55.0 0.0 -10.175 -55.0 0.47781818181818186 -55.0 0.0 -55
+true -55 1969-12-31 16:00:13.15 -55.0 1cGVWH7n1QU 55 -55 0 -55.0 -0.0 55.0 -4375.415 0.0 55.0 0.0 -10.175 -55.0 0.47781818181818186 -55.0 0.0 -55
+true -56 1969-12-31 16:00:11.242 -56.0 cvLH6Eat2yFsyy7p 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56
+true -56 1969-12-31 16:00:13.534 -56.0 1cGVWH7n1QU 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56
+true -56 1969-12-31 16:00:14.038 -56.0 1cGVWH7n1QU 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56
+true -56 1969-12-31 16:00:14.689 -56.0 cvLH6Eat2yFsyy7p 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56
+true -56 1969-12-31 16:00:16.37 -56.0 cvLH6Eat2yFsyy7p 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56
+true -57 1969-12-31 16:00:11.534 -57.0 cvLH6Eat2yFsyy7p 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57
+true -57 1969-12-31 16:00:13.365 -57.0 1cGVWH7n1QU 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57
+true -57 1969-12-31 16:00:14.225 -57.0 821UdmGbkEf4j 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57
+true -58 1969-12-31 16:00:12.918 -58.0 cvLH6Eat2yFsyy7p 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58
+true -58 1969-12-31 16:00:13.209 -58.0 cvLH6Eat2yFsyy7p 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58
+true -58 1969-12-31 16:00:14.933 -58.0 cvLH6Eat2yFsyy7p 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58
+true -59 1969-12-31 16:00:11.065 -59.0 821UdmGbkEf4j 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59
+true -59 1969-12-31 16:00:11.109 -59.0 1cGVWH7n1QU 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59
+true -59 1969-12-31 16:00:11.231 -59.0 821UdmGbkEf4j 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59
+true -59 1969-12-31 16:00:11.758 -59.0 821UdmGbkEf4j 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59
+true -59 1969-12-31 16:00:12.227 -59.0 cvLH6Eat2yFsyy7p 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59
+true -59 1969-12-31 16:00:15.242 -59.0 821UdmGbkEf4j 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59
+true -59 1969-12-31 16:00:15.278 -59.0 cvLH6Eat2yFsyy7p 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59
+true -59 1969-12-31 16:00:16.069 -59.0 cvLH6Eat2yFsyy7p 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59
+true -59 1969-12-31 16:00:16.125 -59.0 cvLH6Eat2yFsyy7p 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59
+true -60 1969-12-31 16:00:11.849 -60.0 1cGVWH7n1QU 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60
+true -60 1969-12-31 16:00:12.223 -60.0 1cGVWH7n1QU 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60
+true -60 1969-12-31 16:00:12.291 -60.0 821UdmGbkEf4j 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60
+true -60 1969-12-31 16:00:13.567 -60.0 821UdmGbkEf4j 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60
+true -60 1969-12-31 16:00:15.188 -60.0 cvLH6Eat2yFsyy7p 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60
+true -60 1969-12-31 16:00:16.165 -60.0 cvLH6Eat2yFsyy7p 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60
+true -61 1969-12-31 16:00:12.045 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61
+true -61 1969-12-31 16:00:12.75 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61
+true -61 1969-12-31 16:00:15.325 -61.0 821UdmGbkEf4j 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61
+true -61 1969-12-31 16:00:15.694 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61
+true -62 1969-12-31 16:00:13.677 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
+true -62 1969-12-31 16:00:14.872 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
+true -62 1969-12-31 16:00:15.153 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
+true -63 1969-12-31 16:00:13.752 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63
+true -63 1969-12-31 16:00:14.899 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63
+true -63 1969-12-31 16:00:15.827 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63
+true -64 1969-12-31 16:00:11.952 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64
+true -64 1969-12-31 16:00:12.857 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64
PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION
SELECT cboolean1,
ctinyint,
@@ -645,43 +644,43 @@ LIMIT 40
POSTHOOK: type: QUERY
POSTHOOK: Input: default@alltypesorc
#### A masked pattern was here ####
-NULL -61 1969-12-31 16:00:00.142 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61
-NULL -61 1969-12-31 16:00:02.698 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61
-NULL -61 1969-12-31 16:00:03.049 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61
-NULL -61 1969-12-31 16:00:04.165 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61
-NULL -61 1969-12-31 16:00:04.977 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61
-NULL -62 1969-12-31 16:00:00.037 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:01.22 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:01.515 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:01.734 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:02.373 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:03.85 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:08.198 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:09.025 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:09.889 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:10.069 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:10.225 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:10.485 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:12.388 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:12.591 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:14.154 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:14.247 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:14.517 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -62 1969-12-31 16:00:14.965 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62
-NULL -63 1969-12-31 16:00:01.843 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63
-NULL -63 1969-12-31 16:00:03.552 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63
-NULL -63 1969-12-31 16:00:06.852 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63
-NULL -63 1969-12-31 16:00:07.375 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63
-NULL -63 1969-12-31 16:00:10.205 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63
-NULL -63 1969-12-31 16:00:11.946 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63
-NULL -63 1969-12-31 16:00:12.188 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63
-NULL -63 1969-12-31 16:00:15.436 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63
-NULL -64 1969-12-31 16:00:00.199 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64
-NULL -64 1969-12-31 16:00:00.29 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64
-NULL -64 1969-12-31 16:00:01.785 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64
-NULL -64 1969-12-31 16:00:03.944 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64
-NULL -64 1969-12-31 16:00:05.997 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64
-NULL -64 1969-12-31 16:00:10.858 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64
-NULL -64 1969-12-31 16:00:11.912 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64
-NULL -64 1969-12-31 16:00:12.339 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64
-NULL -64 1969-12-31 16:00:13.274 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64
+true -61 1969-12-31 16:00:00.554 -61.0
1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:02.339 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:02.497 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:03.742 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:07.538 -61.0 821UdmGbkEf4j 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:09.809 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:10.713 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:12.045 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:12.75 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -62 1969-12-31 16:00:00.337 -62.0 1cGVWH7n1QU 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:00.659 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:00.684 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:01.419 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:02.123 -62.0 1cGVWH7n1QU 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:02.922 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:04.978 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:07.756 -62.0 1cGVWH7n1QU 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:07.847 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:07.903 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:13.677 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:14.872 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:15.153 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -63 1969-12-31 16:00:05.654 -63.0 821UdmGbkEf4j 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:07.623 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 
+true -63 1969-12-31 16:00:09.14 -63.0 821UdmGbkEf4j 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:13.752 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:14.899 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:15.827 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -64 1969-12-31 15:59:58.959 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:00.013 -64.0 1cGVWH7n1QU 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:00.172 -64.0 1cGVWH7n1QU 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:00.631 -64.0 1cGVWH7n1QU 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:01.305 -64.0 cvLH6Eat2yFsyy7p 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:01.79 -64.0 1cGVWH7n1QU 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:02.496 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:03.088 -64.0 cvLH6Eat2yFsyy7p 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:04.662 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:10.273 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:11.952 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:12.857 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 diff --git ql/src/test/results/clientpositive/llap/vectorization_14.q.out ql/src/test/results/clientpositive/llap/vectorization_14.q.out index ac63435343..c0995cc38e 100644 --- ql/src/test/results/clientpositive/llap/vectorization_14.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_14.q.out @@ -126,10 +126,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: float), _col2 (type: double), _col3 (type: timestamp), _col4 (type: boolean) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2, 3, 4] + keyColumns: 0:string, 1:float, 2:double, 3:timestamp, 4:boolean native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [5, 6, 7, 8, 9, 10, 11] + valueColumns: 5:double, 6:double, 7:bigint, 8:float, 9:double, 10:double, 11:bigint Statistics: Num rows: 303 Data size: 52846 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col5 (type: double), _col6 (type: double), _col7 (type: bigint), _col8 (type: float), _col9 (type: double), _col10 (type: 
double), _col11 (type: bigint) Execution mode: vectorized, llap @@ -193,10 +193,10 @@ STAGE PLANS: sort order: ++++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2, 3] + keyColumns: 0:string, 1:float, 2:double, 3:timestamp native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [4, 12, 14, 13, 15, 8, 19, 20, 21, 22, 11, 24, 25, 23, 29, 28, 31, 34] + valueColumns: 4:boolean, 12:double, 14:double, 13:double, 15:float, 8:float, 19:float, 20:float, 21:double, 22:double, 11:bigint, 24:double, 25:double, 23:double, 29:double, 28:double, 31:double, 34:double Statistics: Num rows: 151 Data size: 36700 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col3 (type: boolean), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: float), _col9 (type: float), _col10 (type: float), _col11 (type: float), _col12 (type: double), _col13 (type: double), _col14 (type: bigint), _col15 (type: double), _col16 (type: double), _col17 (type: double), _col18 (type: double), _col19 (type: double), _col20 (type: double), _col21 (type: double) Reducer 3 @@ -204,7 +204,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aaaa + reduceColumnNullOrder: zzzz reduceColumnSortOrder: ++++ allNative: false usesVectorUDFAdaptor: false diff --git ql/src/test/results/clientpositive/llap/vectorization_15.q.out ql/src/test/results/clientpositive/llap/vectorization_15.q.out index db49ad22da..441097ad6f 100644 --- ql/src/test/results/clientpositive/llap/vectorization_15.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_15.q.out @@ -122,10 +122,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: float), _col1 (type: boolean), _col2 (type: double), _col3 (type: string), _col4 (type: tinyint), _col5 (type: int), _col6 (type: timestamp) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2, 3, 4, 5, 6] + keyColumns: 0:float, 1:boolean, 2:double, 3:string, 4:tinyint, 5:int, 6:timestamp native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [7, 8, 9, 10, 11, 12, 13, 14, 15, 16] + valueColumns: 7:double, 8:double, 9:bigint, 10:double, 11:double, 12:double, 13:bigint, 14:double, 15:double, 16:bigint Statistics: Num rows: 6144 Data size: 1278652 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col7 (type: double), _col8 (type: double), _col9 (type: bigint), _col10 (type: double), _col11 (type: double), _col12 (type: double), _col13 (type: bigint), _col14 (type: double), _col15 (type: double), _col16 (type: bigint) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vectorization_16.q.out ql/src/test/results/clientpositive/llap/vectorization_16.q.out index 5f7c8c2412..d80c75004f 100644 --- ql/src/test/results/clientpositive/llap/vectorization_16.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_16.q.out @@ -99,10 +99,10 @@ STAGE 
PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: double), _col2 (type: timestamp) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:double, 2:timestamp native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3, 4, 5, 6] + valueColumns: 3:bigint, 4:double, 5:double, 6:double Statistics: Num rows: 2048 Data size: 303516 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col3 (type: bigint), _col4 (type: double), _col5 (type: double), _col6 (type: double) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vectorization_17.q.out ql/src/test/results/clientpositive/llap/vectorization_17.q.out index ef477258a9..0d377c6a78 100644 --- ql/src/test/results/clientpositive/llap/vectorization_17.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_17.q.out @@ -92,10 +92,10 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3, 4] + keyColumns: 3:bigint, 4:float native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [6, 2, 8, 5, 15, 16, 14, 17, 19, 20, 22, 18] + valueColumns: 6:string, 2:int, 8:timestamp, 5:double, 15:double, 16:bigint, 14:double, 17:double, 19:double, 20:double, 22:decimal(11,4), 18:double Statistics: Num rows: 4096 Data size: 1212930 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: string), _col2 (type: int), _col3 (type: timestamp), _col4 (type: double), _col6 (type: double), _col7 (type: bigint), _col8 (type: double), _col9 (type: double), _col10 (type: double), _col11 (type: double), _col12 (type: decimal(11,4)), _col13 (type: double) Execution mode: vectorized, llap @@ -120,7 +120,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: zz reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false diff --git ql/src/test/results/clientpositive/llap/vectorization_2.q.out ql/src/test/results/clientpositive/llap/vectorization_2.q.out index 32b62ba330..6a0a81ba93 100644 --- ql/src/test/results/clientpositive/llap/vectorization_2.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_2.q.out @@ -101,10 +101,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + valueColumns: 0:bigint, 1:bigint, 2:double, 3:double, 4:double, 5:bigint, 6:bigint, 7:tinyint, 8:double, 9:bigint Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 
(type: double), _col3 (type: double), _col4 (type: double), _col5 (type: bigint), _col6 (type: bigint), _col7 (type: tinyint), _col8 (type: double), _col9 (type: bigint) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vectorization_3.q.out ql/src/test/results/clientpositive/llap/vectorization_3.q.out index 944c0f76d7..1e120f400a 100644 --- ql/src/test/results/clientpositive/llap/vectorization_3.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_3.q.out @@ -106,10 +106,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13] + valueColumns: 0:double, 1:double, 2:bigint, 3:double, 4:double, 5:bigint, 6:double, 7:double, 8:bigint, 9:double, 10:bigint, 11:bigint, 12:double, 13:double Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: double), _col1 (type: double), _col2 (type: bigint), _col3 (type: double), _col4 (type: double), _col5 (type: bigint), _col6 (type: double), _col7 (type: double), _col8 (type: bigint), _col9 (type: double), _col10 (type: bigint), _col11 (type: bigint), _col12 (type: double), _col13 (type: double) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vectorization_4.q.out ql/src/test/results/clientpositive/llap/vectorization_4.q.out index 9eabe7e654..c34d8a90f9 100644 --- ql/src/test/results/clientpositive/llap/vectorization_4.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_4.q.out @@ -101,10 +101,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3, 4] + valueColumns: 0:bigint, 1:double, 2:double, 3:bigint, 4:tinyint Statistics: Num rows: 1 Data size: 36 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint), _col1 (type: double), _col2 (type: double), _col3 (type: bigint), _col4 (type: tinyint) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vectorization_5.q.out ql/src/test/results/clientpositive/llap/vectorization_5.q.out index 15ee8e0538..9b82b34fed 100644 --- ql/src/test/results/clientpositive/llap/vectorization_5.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_5.q.out @@ -94,10 +94,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3, 4] + valueColumns: 0:smallint, 1:bigint, 2:smallint, 3:bigint, 4:tinyint Statistics: Num rows: 1 Data size: 28 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 
(type: smallint), _col1 (type: bigint), _col2 (type: smallint), _col3 (type: bigint), _col4 (type: tinyint) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vectorization_7.q.out ql/src/test/results/clientpositive/llap/vectorization_7.q.out index 008068e0cd..0d60594ed7 100644 --- ql/src/test/results/clientpositive/llap/vectorization_7.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_7.q.out @@ -98,10 +98,9 @@ STAGE PLANS: sort order: +++++++++++++++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [10, 3, 1, 0, 8, 6, 14, 15, 16, 17, 19, 20, 18, 21, 23] + keyColumns: 10:boolean, 3:bigint, 1:smallint, 0:tinyint, 8:timestamp, 6:string, 14:bigint, 15:int, 16:smallint, 17:tinyint, 19:int, 20:bigint, 18:int, 21:tinyint, 23:tinyint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 5461 Data size: 923616 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap @@ -126,7 +125,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aaaaaaaaaaaaaaa + reduceColumnNullOrder: zzzzzzzzzzzzzzz reduceColumnSortOrder: +++++++++++++++ allNative: false usesVectorUDFAdaptor: false @@ -224,31 +223,31 @@ LIMIT 25 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL -2118149242 -7196 56 1969-12-31 15:59:50.462 NULL -4236298484 0 7196 -56 -39 -15242201945432 NULL -56 0 -NULL -2121399625 -7196 27 1969-12-31 15:59:50.046 NULL -4242799250 0 7196 -27 -10 -15265591701500 NULL -27 0 -NULL -2124802690 -7196 -6 1969-12-31 15:59:57.92 NULL -4249605380 0 7196 6 23 -15290080157240 NULL 6 0 -NULL -2128720310 -7196 -52 1969-12-31 15:59:45.978 NULL -4257440620 0 7196 52 69 -15318271350760 NULL 52 0 -NULL -2132232110 -200 60 1969-12-31 15:59:47.019 NULL -4264464220 -200 200 -60 -43 -426446422000 NULL -60 0 -NULL -2132536965 -7196 9 1969-12-31 15:59:46 NULL -4265073930 0 7196 -9 8 -15345736000140 NULL -9 0 -NULL -2135141157 -7196 50 1969-12-31 15:59:50.192 NULL -4270282314 0 7196 -50 -33 -15364475765772 NULL -50 0 -NULL -2137537679 -7196 -25 1969-12-31 15:59:50.136 NULL -4275075358 0 7196 25 42 -15381721138084 NULL 25 0 -NULL -2145481991 -7196 56 1969-12-31 15:59:55.667 NULL -4290963982 0 7196 -56 -39 -15438888407236 NULL -56 0 -NULL NULL -200 -36 1969-12-31 15:59:57.241 NULL NULL -200 200 36 53 NULL NULL 36 0 -NULL NULL -200 -43 1969-12-31 15:59:53.783 NULL NULL -200 200 43 60 NULL NULL 43 0 -NULL NULL -200 -58 1969-12-31 15:59:51.115 NULL NULL -200 200 58 75 NULL NULL 58 0 -NULL NULL -200 22 1969-12-31 15:59:50.109 NULL NULL -200 200 -22 -5 NULL NULL -22 0 -NULL NULL -200 3 1969-12-31 15:59:50.489 NULL NULL -200 200 -3 14 NULL NULL -3 0 -NULL NULL -200 43 1969-12-31 15:59:57.003 NULL NULL -200 200 -43 -26 NULL NULL -43 0 -NULL NULL -200 53 1969-12-31 15:59:49.46 NULL NULL -200 200 -53 -36 NULL NULL -53 0 -NULL NULL -200 9 1969-12-31 15:59:44.108 NULL NULL -200 200 -9 8 NULL NULL -9 0 -NULL NULL -7196 -38 1969-12-31 15:59:53.503 NULL NULL 0 7196 38 55 NULL NULL 38 0 -NULL NULL -7196 -49 1969-12-31 15:59:51.009 NULL NULL 0 7196 49 66 NULL NULL 49 0 
-NULL NULL -7196 -49 1969-12-31 15:59:52.052 NULL NULL 0 7196 49 66 NULL NULL 49 0 -NULL NULL -7196 -50 1969-12-31 15:59:52.424 NULL NULL 0 7196 50 67 NULL NULL 50 0 -NULL NULL -7196 -61 1969-12-31 15:59:44.823 NULL NULL 0 7196 61 78 NULL NULL 61 0 -NULL NULL -7196 1 1969-12-31 15:59:48.361 NULL NULL 0 7196 -1 16 NULL NULL -1 0 -NULL NULL -7196 14 1969-12-31 15:59:50.291 NULL NULL 0 7196 -14 3 NULL NULL -14 0 -NULL NULL -7196 22 1969-12-31 15:59:52.699 NULL NULL 0 7196 -22 -5 NULL NULL -22 0 +true NULL -15892 29 1969-12-31 15:59:57.937 821UdmGbkEf4j NULL -215 15892 -29 -12 NULL 171 -29 0 +true NULL -15899 50 1969-12-31 15:59:46.926 821UdmGbkEf4j NULL -222 15899 -50 -33 NULL 10210 -50 0 +true NULL -15903 -2 1969-12-31 15:59:46.371 cvLH6Eat2yFsyy7p NULL -226 15903 2 19 NULL 14465 2 0 +true NULL -15920 -64 1969-12-31 15:59:51.859 cvLH6Eat2yFsyy7p NULL -243 15920 64 81 NULL 6687 64 0 +true NULL -15922 -17 1969-12-31 15:59:46.164 821UdmGbkEf4j NULL -245 15922 17 34 NULL 10851 17 0 +true NULL -15923 49 1969-12-31 15:59:47.323 cvLH6Eat2yFsyy7p NULL -246 15923 -49 -32 NULL 2628 -49 0 +true NULL -15935 -6 1969-12-31 15:59:45.859 1cGVWH7n1QU NULL -1 15935 6 23 NULL 12046 6 0 +true NULL -15948 31 1969-12-31 15:59:47.577 821UdmGbkEf4j NULL -14 15948 -31 -14 NULL 7799 -31 0 +true NULL -15948 6 1969-12-31 15:59:49.269 1cGVWH7n1QU NULL -14 15948 -6 11 NULL 12436 -6 0 +true NULL -15980 -6 1969-12-31 15:59:54.84 1cGVWH7n1QU NULL -46 15980 6 23 NULL 14836 6 0 +true NULL -15999 4 1969-12-31 15:59:46.491 1cGVWH7n1QU NULL -65 15999 -4 13 NULL 1231 -4 0 +true NULL -16017 -21 1969-12-31 15:59:44.02 821UdmGbkEf4j NULL -83 16017 21 38 NULL 2282 21 0 +true NULL -16025 -42 1969-12-31 15:59:54.534 cvLH6Eat2yFsyy7p NULL -91 16025 42 59 NULL 14242 42 0 +true NULL -16036 -15 1969-12-31 15:59:58.681 1cGVWH7n1QU NULL -102 16036 15 32 NULL 7928 15 0 +true NULL -16059 -35 1969-12-31 15:59:53.038 821UdmGbkEf4j NULL -125 16059 35 52 NULL 12437 35 0 +true NULL -16076 59 1969-12-31 15:59:55.023 821UdmGbkEf4j NULL -142 16076 -59 -42 NULL 7907 -59 0 +true NULL -16122 50 1969-12-31 15:59:51.608 1cGVWH7n1QU NULL -188 16122 -50 -33 NULL 1828 -50 0 +true NULL -16123 -20 1969-12-31 15:59:51.177 1cGVWH7n1QU NULL -189 16123 20 37 NULL 2217 20 0 +true NULL -16153 35 1969-12-31 15:59:52.036 1cGVWH7n1QU NULL -219 16153 -35 -18 NULL 14817 -35 0 +true NULL -16169 5 1969-12-31 15:59:45.059 1cGVWH7n1QU NULL -235 16169 -5 12 NULL 6104 -5 0 +true NULL -16207 -4 1969-12-31 15:59:45.956 cvLH6Eat2yFsyy7p NULL -16 16207 4 21 NULL 8290 4 0 +true NULL -16221 -12 1969-12-31 15:59:45.877 1cGVWH7n1QU NULL -30 16221 12 29 NULL 1378 12 0 +true NULL -16227 2 1969-12-31 15:59:44.065 821UdmGbkEf4j NULL -36 16227 -2 15 NULL 9761 -2 0 +true NULL -16305 3 1969-12-31 15:59:43.878 1cGVWH7n1QU NULL -114 16305 -3 14 NULL 8491 -3 0 +true NULL -16339 15 1969-12-31 15:59:53.966 821UdmGbkEf4j NULL -148 16339 -15 2 NULL 12588 -15 0 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, cbigint, diff --git ql/src/test/results/clientpositive/llap/vectorization_8.q.out ql/src/test/results/clientpositive/llap/vectorization_8.q.out index de95bbbf28..662409d4f1 100644 --- ql/src/test/results/clientpositive/llap/vectorization_8.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_8.q.out @@ -94,10 +94,9 @@ STAGE PLANS: sort order: ++++++++++++++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [8, 5, 10, 6, 4, 13, 14, 15, 17, 19, 16, 18, 20, 22] + keyColumns: 8:timestamp, 5:double, 10:boolean, 6:string, 4:float, 
13:double, 14:double, 15:double, 17:float, 19:double, 16:double, 18:float, 20:float, 22:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 3059 Data size: 557250 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.1 Execution mode: vectorized, llap @@ -122,7 +121,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aaaaaaaaaaaaaa + reduceColumnNullOrder: zzzzzzzzzzzzzz reduceColumnSortOrder: ++++++++++++++ allNative: false usesVectorUDFAdaptor: false diff --git ql/src/test/results/clientpositive/llap/vectorization_9.q.out ql/src/test/results/clientpositive/llap/vectorization_9.q.out index 5f7c8c2412..d80c75004f 100644 --- ql/src/test/results/clientpositive/llap/vectorization_9.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_9.q.out @@ -99,10 +99,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: double), _col2 (type: timestamp) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:double, 2:timestamp native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3, 4, 5, 6] + valueColumns: 3:bigint, 4:double, 5:double, 6:double Statistics: Num rows: 2048 Data size: 303516 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col3 (type: bigint), _col4 (type: double), _col5 (type: double), _col6 (type: double) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vectorization_div0.q.out ql/src/test/results/clientpositive/llap/vectorization_div0.q.out index fcb084ad4e..76e1d5ba00 100644 --- ql/src/test/results/clientpositive/llap/vectorization_div0.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_div0.q.out @@ -111,106 +111,106 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### cint cint_div ctinyint ctinyint_div cbigint cbigint_div cdouble cdouble_div -NULL NULL -60 NULL -1016256928 NULL 15601.0 NULL -NULL NULL -60 NULL -1062217466 NULL -200.0 NULL -NULL NULL -60 NULL -1183915345 NULL -7196.0 NULL -NULL NULL -60 NULL -126921733 NULL -200.0 NULL -NULL NULL -60 NULL -1445021496 NULL -200.0 NULL -NULL NULL -60 NULL -1690528981 NULL -200.0 NULL -NULL NULL -60 NULL -1743144280 NULL 15601.0 NULL -NULL NULL -60 NULL -1802243330 NULL -7196.0 NULL -NULL NULL -60 NULL -1860186661 NULL -200.0 NULL -NULL NULL -60 NULL -2041965187 NULL 15601.0 NULL -NULL NULL -60 NULL -483910982 NULL -200.0 NULL -NULL NULL -60 NULL -508015343 NULL -200.0 NULL -NULL NULL -60 NULL -519753851 NULL 15601.0 NULL -NULL NULL -60 NULL -5953872 NULL 15601.0 NULL -NULL NULL -60 NULL -68838726 NULL -7196.0 NULL -NULL NULL -60 NULL -903925845 NULL 15601.0 NULL -NULL NULL -60 NULL 1122241452 NULL 15601.0 NULL -NULL NULL -60 NULL 1172431520 NULL -200.0 NULL -NULL NULL -60 NULL 927847540 NULL -200.0 NULL -NULL NULL -60 NULL NULL NULL -200.0 NULL -NULL NULL -61 NULL 
-1022679553 NULL 15601.0 NULL -NULL NULL -61 NULL -1062521098 NULL -7196.0 NULL -NULL NULL -61 NULL -1313743110 NULL -200.0 NULL -NULL NULL -61 NULL -1513172815 NULL -7196.0 NULL -NULL NULL -61 NULL -1728754595 NULL -7196.0 NULL -NULL NULL -61 NULL -1769786673 NULL -200.0 NULL -NULL NULL -61 NULL -2114172148 NULL -7196.0 NULL -NULL NULL -61 NULL -2175533 NULL -7196.0 NULL -NULL NULL -61 NULL -836697023 NULL -200.0 NULL -NULL NULL -61 NULL -854893578 NULL 15601.0 NULL -NULL NULL -61 NULL -982179838 NULL 15601.0 NULL -NULL NULL -61 NULL 1114673625 NULL 15601.0 NULL -NULL NULL -61 NULL 1139675920 NULL 15601.0 NULL -NULL NULL -61 NULL 1237548317 NULL -7196.0 NULL -NULL NULL -61 NULL 127734700 NULL -7196.0 NULL -NULL NULL -61 NULL 1399483216 NULL -200.0 NULL -NULL NULL -61 NULL 1415466231 NULL -7196.0 NULL -NULL NULL -61 NULL 184425274 NULL -200.0 NULL -NULL NULL -61 NULL 1977536065 NULL 15601.0 NULL -NULL NULL -61 NULL 484546535 NULL 15601.0 NULL -NULL NULL -61 NULL 623787602 NULL -200.0 NULL -NULL NULL -61 NULL 919939154 NULL 15601.0 NULL -NULL NULL -61 NULL 943547371 NULL -7196.0 NULL -NULL NULL -61 NULL NULL NULL -7196.0 NULL -NULL NULL -61 NULL NULL NULL -7196.0 NULL -NULL NULL -62 NULL -1113073921 NULL -200.0 NULL -NULL NULL -62 NULL -1367753794 NULL -7196.0 NULL -NULL NULL -62 NULL -1592016120 NULL 15601.0 NULL -NULL NULL -62 NULL -167812632 NULL -200.0 NULL -NULL NULL -62 NULL -1726415169 NULL 15601.0 NULL -NULL NULL -62 NULL -1761785534 NULL -7196.0 NULL -NULL NULL -62 NULL -2080605724 NULL -200.0 NULL -NULL NULL -62 NULL -642836823 NULL -7196.0 NULL -NULL NULL -62 NULL -840223244 NULL -7196.0 NULL -NULL NULL -62 NULL 1221804187 NULL -200.0 NULL -NULL NULL -62 NULL 1380844570 NULL -7196.0 NULL -NULL NULL -62 NULL 1443417260 NULL -200.0 NULL -NULL NULL -62 NULL 1607712873 NULL -200.0 NULL -NULL NULL -62 NULL 1670449519 NULL -7196.0 NULL -NULL NULL -62 NULL 2071666427 NULL -200.0 NULL -NULL NULL -62 NULL 281485844 NULL 15601.0 NULL -NULL NULL -62 NULL 325025905 NULL -200.0 NULL -NULL NULL -62 NULL 667693308 NULL 15601.0 NULL -NULL NULL -62 NULL 68899019 NULL 15601.0 NULL -NULL NULL -62 NULL 726070601 NULL -200.0 NULL -NULL NULL -62 NULL 73960976 NULL 15601.0 NULL -NULL NULL -62 NULL 756424745 NULL -7196.0 NULL -NULL NULL -62 NULL 986221936 NULL -7196.0 NULL -NULL NULL -62 NULL NULL NULL -7196.0 NULL -NULL NULL -62 NULL NULL NULL -7196.0 NULL -NULL NULL -63 NULL -1167054574 NULL 15601.0 NULL -NULL NULL -63 NULL -1224023895 NULL -7196.0 NULL -NULL NULL -63 NULL -1574729892 NULL 15601.0 NULL -NULL NULL -63 NULL -1711796768 NULL -7196.0 NULL -NULL NULL -63 NULL -1996001975 NULL 15601.0 NULL -NULL NULL -63 NULL -1999307539 NULL -200.0 NULL -NULL NULL -63 NULL -200542601 NULL 15601.0 NULL -NULL NULL -63 NULL -2070832461 NULL -200.0 NULL -NULL NULL -63 NULL -721244708 NULL 15601.0 NULL -NULL NULL -63 NULL -994504916 NULL -7196.0 NULL -NULL NULL -63 NULL -997946077 NULL -200.0 NULL -NULL NULL -63 NULL 1089367203 NULL -200.0 NULL -NULL NULL -63 NULL 1927856372 NULL -200.0 NULL -NULL NULL -63 NULL 2059199534 NULL 15601.0 NULL -NULL NULL -63 NULL 483904240 NULL 15601.0 NULL -NULL NULL -63 NULL 507317726 NULL -200.0 NULL -NULL NULL -63 NULL 956380949 NULL -200.0 NULL -NULL NULL -64 NULL -1615920595 NULL -7196.0 NULL -NULL NULL -64 NULL -1639157869 NULL -7196.0 NULL -NULL NULL -64 NULL -1809291815 NULL 15601.0 NULL -NULL NULL -64 NULL -1809444706 NULL -200.0 NULL -NULL NULL -64 NULL -527203677 NULL -7196.0 NULL -NULL NULL -64 NULL 1090418478 NULL -7196.0 NULL -NULL NULL -64 NULL 1421812187 NULL 
15601.0 NULL -NULL NULL -64 NULL 1805860756 NULL -7196.0 NULL -NULL NULL -64 NULL 1960950366 NULL 15601.0 NULL -NULL NULL -64 NULL 2118653994 NULL -200.0 NULL -NULL NULL -64 NULL 406535485 NULL -7196.0 NULL -NULL NULL -64 NULL 658026952 NULL -7196.0 NULL -NULL NULL -64 NULL 927647669 NULL -200.0 NULL +-1039715238 NULL -51 NULL -86361999 NULL NULL NULL +-1039762548 NULL NULL NULL -1645852809 NULL -3802.0 NULL +-1039776293 NULL NULL NULL -1645852809 NULL 13704.0 NULL +-1041252354 NULL NULL NULL -1887561756 NULL 756.0 NULL +-1041353707 NULL 11 NULL -931949639 NULL NULL NULL +-1041391389 NULL NULL NULL 1864027286 NULL -12970.0 NULL +-1041734429 NULL NULL NULL -1645852809 NULL -836.0 NULL +-1042396242 NULL NULL NULL -1887561756 NULL 9583.0 NULL +-1042712895 NULL NULL NULL -1887561756 NULL 9296.0 NULL +-1042805968 NULL NULL NULL -1887561756 NULL 5133.0 NULL +-1043082182 NULL NULL NULL -1887561756 NULL 9180.0 NULL +-1043132597 NULL NULL NULL -1887561756 NULL 12302.0 NULL +-1043573508 NULL NULL NULL 1864027286 NULL 16216.0 NULL +-1043979188 NULL 11 NULL -8894336 NULL NULL NULL +-1044093617 NULL NULL NULL -1887561756 NULL -3422.0 NULL +-1044207190 NULL NULL NULL -1645852809 NULL 5381.0 NULL +-1044357977 NULL 11 NULL -1392575676 NULL NULL NULL +-1044748460 NULL -51 NULL 538703088 NULL NULL NULL +-1044828205 NULL -51 NULL -1627128549 NULL NULL NULL +-1045087657 NULL NULL NULL -1645852809 NULL -5865.0 NULL +-1045181724 NULL NULL NULL -1887561756 NULL -5706.0 NULL +-1045196363 NULL NULL NULL -1887561756 NULL -5039.0 NULL +-1045737053 NULL 8 NULL -1286738860 NULL NULL NULL +-1045867222 NULL NULL NULL -1887561756 NULL -8034.0 NULL +-1046399794 NULL NULL NULL -1887561756 NULL 4130.0 NULL +-1046766350 NULL 8 NULL -1069616395 NULL NULL NULL +-1046913669 NULL 8 NULL -90393132 NULL NULL NULL +-1047036113 NULL 11 NULL -240113848 NULL NULL NULL +-1047782718 NULL 11 NULL -1527855515 NULL NULL NULL +-1048097158 NULL 11 NULL -234579722 NULL NULL NULL +-1048696030 NULL 11 NULL -1554184139 NULL NULL NULL +-1048934049 NULL NULL NULL -1887561756 NULL -524.0 NULL +-1049984461 NULL 8 NULL -247067895 NULL NULL NULL +-1050165799 NULL NULL NULL 1864027286 NULL 8634.0 NULL +-1050388484 NULL 8 NULL 987404155 NULL NULL NULL +-1050657303 NULL NULL NULL -1645852809 NULL -6999.0 NULL +-1050684541 NULL NULL NULL -1887561756 NULL -8261.0 NULL +-1051223597 NULL 11 NULL -1074802968 NULL NULL NULL +-1052322972 NULL NULL NULL -1645852809 NULL -7433.0 NULL +-1052668265 NULL 8 NULL 1712280188 NULL NULL NULL +-1052745800 NULL NULL NULL -1645852809 NULL -12404.0 NULL +-1053238077 NULL NULL NULL -1645852809 NULL -3704.0 NULL +-1053254526 NULL 11 NULL 1704531790 NULL NULL NULL +-1053385587 NULL NULL NULL -1645852809 NULL 14504.0 NULL +-1054849160 NULL 11 NULL -1027630923 NULL NULL NULL +-1054958082 NULL 8 NULL 762300991 NULL NULL NULL +-1055040773 NULL -51 NULL 1331071870 NULL NULL NULL +-1055076545 NULL 11 NULL 542002983 NULL NULL NULL +-1055185482 NULL 11 NULL -398806473 NULL NULL NULL +-1055316250 NULL NULL NULL -1887561756 NULL -14990.0 NULL +-1055669248 NULL NULL NULL 1864027286 NULL 2570.0 NULL +-1055945837 NULL NULL NULL -1645852809 NULL 13690.0 NULL +-1056684111 NULL NULL NULL 1864027286 NULL 13991.0 NULL +-1058286942 NULL 8 NULL -922041114 NULL NULL NULL +-1058844180 NULL -51 NULL 822773337 NULL NULL NULL +-1058897881 NULL 8 NULL -800997317 NULL NULL NULL +-1059047258 NULL NULL NULL 1864027286 NULL 12452.0 NULL +-1059338191 NULL NULL NULL 1864027286 NULL 7322.0 NULL +-1059487309 NULL 8 NULL 1632546080 NULL NULL NULL +-1059941909 
NULL NULL NULL -1887561756 NULL 8782.0 NULL +-1060624784 NULL -51 NULL -941434751 NULL NULL NULL +-1060670281 NULL 11 NULL -1705503157 NULL NULL NULL +-1060990068 NULL 11 NULL 960036652 NULL NULL NULL +-1061057428 NULL NULL NULL -1887561756 NULL -1085.0 NULL +-1061509617 NULL 8 NULL 453428995 NULL NULL NULL +-1061614989 NULL NULL NULL 1864027286 NULL -4234.0 NULL +-1062973443 NULL NULL NULL -1645852809 NULL 10541.0 NULL +-1063164541 NULL 8 NULL -74907656 NULL NULL NULL +-1063498122 NULL NULL NULL 1864027286 NULL -11480.0 NULL +-1063745167 NULL 8 NULL -68741114 NULL NULL NULL +-1064623720 NULL 11 NULL -1894858490 NULL NULL NULL +-1064718136 NULL -51 NULL 156403402 NULL NULL NULL +-1064949302 NULL NULL NULL -1645852809 NULL 6454.0 NULL +-1064981602 NULL -51 NULL -1444011153 NULL NULL NULL +-1065117869 NULL NULL NULL -1887561756 NULL 2538.0 NULL +-1065775394 NULL -51 NULL -1331703092 NULL NULL NULL +-1066226047 NULL NULL NULL 1864027286 NULL -9439.0 NULL +-1066684273 NULL -51 NULL 2034191923 NULL NULL NULL +-1066922682 NULL NULL NULL -1645852809 NULL -9987.0 NULL +-1067386090 NULL NULL NULL -1887561756 NULL -3977.0 NULL +-1067683781 NULL -51 NULL 1750003656 NULL NULL NULL +-1067874703 NULL 11 NULL -1742615956 NULL NULL NULL +-1068206466 NULL 8 NULL 1240583144 NULL NULL NULL +-1068247011 NULL 8 NULL -729456614 NULL NULL NULL +-1068336533 NULL 11 NULL 925708299 NULL NULL NULL +-1068623584 NULL NULL NULL -1887561756 NULL -14005.0 NULL +-1069097390 NULL 11 NULL -1858556598 NULL NULL NULL +-1069103950 NULL 11 NULL -927759444 NULL NULL NULL +-1069109166 NULL NULL NULL -1645852809 NULL 8390.0 NULL +-1069512165 NULL NULL NULL -1645852809 NULL 11417.0 NULL +-1069736047 NULL 11 NULL -453772520 NULL NULL NULL +-1070551679 NULL NULL NULL 1864027286 NULL -947.0 NULL +-1070883071 NULL NULL NULL -1645852809 NULL -741.0 NULL +-1071363017 NULL 8 NULL 1349676361 NULL NULL NULL +-1071480828 NULL -51 NULL -1401575336 NULL NULL NULL +-1072076362 NULL NULL NULL 1864027286 NULL -5470.0 NULL +-1072081801 NULL NULL NULL 1864027286 NULL 8373.0 NULL +-1072910839 NULL 11 NULL 2048385991 NULL NULL NULL +-1073051226 NULL NULL NULL -1887561756 NULL -7382.0 NULL +-1073279343 NULL 11 NULL -1595604468 NULL NULL NULL PREHOOK: query: explain vectorization expression select (cbigint - 988888L) as s1, cdouble / (cbigint - 988888L) as s2, 1.2 / (cbigint - 988888L) as s3 from alltypesorc where cbigint > 0 and cbigint < 100000000 order by s1, s2, s3 limit 100 @@ -841,33 +841,33 @@ cint cbigint ctinyint c1 c2 c3 c4 c5 c6 518304665 1758550605 11 -50.66466248332617 2.3752809176800223 1.0 6799565 277841025 0 519195191 301311742 8 -55.590873825535546 -0.42030748533591705 1.0 5518511 301311742 0 519627078 -1887561756 NULL -58.334667723581276 0.6495936807799166 NULL 2981116 -1887561756 NULL -NULL -1111841132 0 NULL 0.5219820874778469 NULL NULL -1111841132 NULL -NULL -1300968933 0 NULL 0.5609644308891505 NULL NULL -1300968933 NULL -NULL -1355080830 0 NULL 0.5709746619109379 NULL NULL -1355080830 NULL -NULL -1379420228 0 NULL 0.5753299124049946 NULL NULL -1379420228 NULL -NULL -1418871864 0 NULL 0.5822045387685764 NULL NULL -1418871864 NULL -NULL -203039588 0 NULL 0.1662575351985599 NULL NULL -203039588 NULL -NULL -229832118 0 NULL 0.18415622913786178 NULL NULL -229832118 NULL -NULL -277546656 0 NULL 0.21419893397937406 NULL NULL -277546656 NULL -NULL -39854776 0 NULL 0.03766811940658894 NULL NULL -39854776 NULL -NULL -438779645 0 NULL 0.3011578829200047 NULL NULL -438779645 NULL -NULL -495480552 0 NULL 0.32733585778445334 NULL NULL 
-495480552 NULL -NULL -741129356 0 NULL 0.42125774599060745 NULL NULL -741129356 NULL -NULL -901264012 0 NULL 0.46954044013967267 NULL NULL -901264012 NULL -NULL 1018195815 0 NULL NULL NULL NULL NULL NULL -NULL 1049949527 0 NULL 33.065410651831826 NULL NULL 2077031 NULL -NULL 10989626 0 NULL -0.010910999277030852 NULL NULL 10989626 NULL -NULL 1561097160 0 NULL 2.87547115949768 NULL NULL 475294470 NULL -NULL 1580847931 0 NULL 2.8096365161452623 NULL NULL 455543699 NULL -NULL 1585496199 0 NULL 2.794808964909849 NULL NULL 450895431 NULL -NULL 1638241933 0 NULL 2.6421291665920887 NULL NULL 398149697 NULL -NULL 1738765387 0 NULL 2.413043035072816 NULL NULL 297626243 NULL -NULL 1907356119 0 NULL 2.145120638449015 NULL NULL 129035511 NULL -NULL 2136716416 0 NULL 1.9103058218951838 NULL NULL 1018195815 NULL -NULL 2144209609 0 NULL 1.904248083305452 NULL NULL 1018195815 NULL -NULL 406548885 0 NULL -0.6646790248746937 NULL NULL 406548885 NULL -NULL 473839931 0 NULL -0.8704598313848666 NULL NULL 473839931 NULL -NULL 53950949 0 NULL -0.05595150246825374 NULL NULL 53950949 NULL -NULL 618557893 0 NULL -1.5477957895096852 NULL NULL 218919971 NULL -NULL 738226024 0 NULL -2.636805997401341 NULL NULL 178286442 NULL -NULL 98841361 0 NULL -0.10751170081349277 NULL NULL 98841361 NULL +520081159 -1827280551 8 -61.52179743844285 0.6421703489910483 1.0 4411071 -1827280551 0 +520374125 59296415 8 -63.76632193888667 -0.0618379936414602 1.0 6253679 59296415 0 +520630560 275901824 -51 -65.86752598964071 -0.3716880741932343 1.0 6857105 275901824 0 +520879263 -1480800353 11 -68.03983944100872 0.5925580727020947 1.0 304991 -1480800353 0 +521019755 -1909738698 11 -69.33052868045986 0.6522477499140705 1.0 2483927 -1909738698 0 +521080737 -1918433146 8 -69.90590821340939 0.65327733652355 1.0 6752667 -1918433146 0 +521249276 -1887561756 NULL -71.54621095544556 0.6495936807799166 NULL 3979415 -1887561756 NULL +521256931 1864027286 NULL -71.62251677559098 2.2037809539011586 NULL 4530575 172364344 NULL +521315946 -986052008 11 -72.21621730196662 0.49198107972698546 1.0 1560834 -986052008 0 +521389499 -112901465 -51 -72.96990105899457 0.099815875253453 1.0 6930203 -112901465 0 +521504167 -1645852809 NULL -74.17633871931272 0.6178013397250965 NULL 1239767 -1645852809 NULL +522187830 -1887561756 NULL -82.27398980011934 0.6495936807799166 NULL 1738996 -1887561756 NULL +522957489 -1645852809 NULL -93.76572030298651 0.6178013397250965 NULL 4270635 -1645852809 NULL +523172866 -1928034601 11 -97.57227259511133 0.654407269210678 1.0 3068469 -1928034601 0 +523369608 634246195 -51 -101.32691133031916 -1.651899525255423 1.0 1688549 250296575 0 +523396209 -1887561756 NULL -101.85663156862294 0.6495936807799166 NULL 4401851 -1887561756 NULL +524224864 -801085374 -51 -121.63263627974922 0.44033070799810264 1.0 2726601 -801085374 0 +524852698 -942817737 11 -142.54287412864886 0.48078083705155344 1.0 1998900 -942817737 0 +525437671 752506166 11 -169.6549512833958 -2.832275057881536 1.0 2028447 221126868 0 +525640312 -1887561756 NULL -181.60251653592817 0.6495936807799166 NULL 1743957 -1887561756 NULL +525718152 -1624826596 8 -186.6489214890924 0.6147608091545615 1.0 1827762 -1624826596 0 +525955379 -1645852809 NULL -203.90704267834076 0.6178013397250965 NULL 2339615 -1645852809 NULL +526337887 1864027286 NULL -239.5842681439132 2.2037809539011586 NULL 1283567 172364344 NULL +527127072 1864027286 NULL -374.4611382437247 2.2037809539011586 NULL 649142 172364344 NULL +527187434 -1645852809 NULL -391.2822101143518 0.6178013397250965 NULL 380231 
-1645852809 NULL +527554807 1864027286 NULL -538.3432048246867 2.2037809539011586 NULL 336327 172364344 NULL +528023644 1864027286 NULL -1033.0657082541775 2.2037809539011586 NULL 33585 172364344 NULL +528393062 -1131246885 -51 -3728.824402808652 0.5262977631364633 1.0 116822 -1131246885 0 +528534767 NULL -64 NULL NULL 1.0 NULL NULL 0 +528534767 NULL -64 NULL NULL 1.0 NULL NULL 0 diff --git ql/src/test/results/clientpositive/llap/vectorization_limit.q.out ql/src/test/results/clientpositive/llap/vectorization_limit.q.out index 3d5bea143b..0e8f42d39c 100644 --- ql/src/test/results/clientpositive/llap/vectorization_limit.q.out +++ ql/src/test/results/clientpositive/llap/vectorization_limit.q.out @@ -147,10 +147,9 @@ STAGE PLANS: sort order: +++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 5, 1] + keyColumns: 0:tinyint, 5:double, 1:smallint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 9173 Data size: 109584 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.3 Execution mode: vectorized, llap @@ -175,7 +174,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aaa + reduceColumnNullOrder: zzz reduceColumnSortOrder: +++ allNative: false usesVectorUDFAdaptor: false @@ -305,10 +304,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: tinyint) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:tinyint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2] + valueColumns: 1:double, 2:bigint Statistics: Num rows: 131 Data size: 2492 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: double), _col2 (type: bigint) Execution mode: vectorized, llap @@ -372,10 +371,9 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 3] + keyColumns: 0:tinyint, 3:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 131 Data size: 1444 Basic stats: COMPLETE Column stats: COMPLETE TopN Hash Memory Usage: 0.3 Reducer 3 @@ -383,7 +381,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: zz reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -433,6 +431,7 @@ POSTHOOK: query: select ctinyint,avg(cdouble + 1) as cavg from alltypesorc group POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### +-45 326.44444444444446 -46 3033.55 -47 -574.6428571428571 -48 1672.909090909091 @@ 
-452,7 +451,6 @@ POSTHOOK: Input: default@alltypesorc
 -62 245.69387755102042
 -63 2178.7272727272725
 -64 373.52941176470586
-NULL 9370.0945309795
 PREHOOK: query: explain vectorization detail
 select distinct(ctinyint) as cdistinct from alltypesorc order by cdistinct limit 20
 PREHOOK: type: QUERY
@@ -518,11 +516,10 @@ STAGE PLANS:
 Map-reduce partition columns: _col0 (type: tinyint)
 Reduce Sink Vectorization:
 className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [0]
+ keyColumns: 0:tinyint
 native: true
 nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [0]
- valueColumnNums: []
+ partitionColumns: 0:tinyint
 Statistics: Num rows: 131 Data size: 264 Basic stats: COMPLETE Column stats: COMPLETE
 TopN Hash Memory Usage: 0.3
 Execution mode: vectorized, llap
@@ -547,7 +544,7 @@ STAGE PLANS:
 Reduce Vectorization:
 enabled: true
 enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: a
+ reduceColumnNullOrder: z
 reduceColumnSortOrder: +
 allNative: false
 usesVectorUDFAdaptor: false
@@ -601,6 +598,7 @@ POSTHOOK: query: select distinct(ctinyint) as cdistinct from alltypesorc order b
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
+-45
 -46
 -47
 -48
@@ -620,7 +618,6 @@ POSTHOOK: Input: default@alltypesorc
 -62
 -63
 -64
-NULL
 PREHOOK: query: explain vectorization detail
 select ctinyint, count(distinct(cdouble)) as count_distinct from alltypesorc group by ctinyint order by ctinyint, count_distinct limit 20
 PREHOOK: type: QUERY
@@ -678,11 +675,10 @@ STAGE PLANS:
 Map-reduce partition columns: _col0 (type: tinyint)
 Reduce Sink Vectorization:
 className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [0, 1]
+ keyColumns: 0:tinyint, 1:double
 native: true
 nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- partitionColumnNums: [0]
- valueColumnNums: []
+ partitionColumns: 0:tinyint
 Statistics: Num rows: 6144 Data size: 55052 Basic stats: COMPLETE Column stats: COMPLETE
 Execution mode: vectorized, llap
 LLAP IO: all inputs
@@ -748,10 +744,9 @@ STAGE PLANS:
 sort order: ++
 Reduce Sink Vectorization:
 className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [0, 1]
+ keyColumns: 0:tinyint, 1:bigint
 native: true
 nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: []
 Statistics: Num rows: 131 Data size: 1444 Basic stats: COMPLETE Column stats: COMPLETE
 TopN Hash Memory Usage: 0.3
 Reducer 3
@@ -759,7 +754,7 @@ STAGE PLANS:
 Reduce Vectorization:
 enabled: true
 enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aa
+ reduceColumnNullOrder: zz
 reduceColumnSortOrder: ++
 allNative: false
 usesVectorUDFAdaptor: false
@@ -809,6 +804,7 @@ POSTHOOK: query: select ctinyint, count(distinct(cdouble)) as count_distinct fro
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
+-45 24
 -46 24
 -47 22
 -48 29
@@ -828,7 +824,6 @@ POSTHOOK: Input: default@alltypesorc
 -62 27
 -63 19
 -64 24
-NULL 2932
 PREHOOK: query: explain vectorization detail
 select ctinyint,cdouble from alltypesorc order by ctinyint,cdouble limit 0
 PREHOOK: type: QUERY
@@ -916,10 +911,10 @@ STAGE PLANS:
 Map-reduce partition columns: _col0 (type: double)
 Reduce Sink Vectorization:
 className: VectorReduceSinkMultiKeyOperator
- keyColumnNums: [0]
+ keyColumns: 0:double
 native: true
 nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: [1]
+ valueColumns: 1:bigint
 Statistics: Num rows: 4586 Data size: 64088 Basic stats: COMPLETE Column stats: COMPLETE
 value expressions: _col1 (type: bigint)
 Execution mode: vectorized, llap
@@ -974,10 +969,9 @@ STAGE PLANS:
 sort order: ++
 Reduce Sink Vectorization:
 className: VectorReduceSinkObjectHashOperator
- keyColumnNums: [1, 0]
+ keyColumns: 1:bigint, 0:double
 native: true
 nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: []
 Statistics: Num rows: 4586 Data size: 64088 Basic stats: COMPLETE Column stats: COMPLETE
 TopN Hash Memory Usage: 0.3
 Reducer 3
@@ -985,7 +979,7 @@ STAGE PLANS:
 Reduce Vectorization:
 enabled: true
 enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
- reduceColumnNullOrder: aa
+ reduceColumnNullOrder: zz
 reduceColumnSortOrder: ++
 allNative: false
 usesVectorUDFAdaptor: false
diff --git ql/src/test/results/clientpositive/llap/vectorization_nested_udf.q.out ql/src/test/results/clientpositive/llap/vectorization_nested_udf.q.out
index 1a87d1d593..a3e1b2cf50 100644
--- ql/src/test/results/clientpositive/llap/vectorization_nested_udf.q.out
+++ ql/src/test/results/clientpositive/llap/vectorization_nested_udf.q.out
@@ -53,10 +53,9 @@ STAGE PLANS:
 sort order:
 Reduce Sink Vectorization:
 className: VectorReduceSinkEmptyKeyOperator
- keyColumnNums: []
 native: true
 nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
- valueColumnNums: [0]
+ valueColumns: 0:bigint
 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
 value expressions: _col0 (type: bigint)
 Execution mode: vectorized, llap
diff --git ql/src/test/results/clientpositive/llap/vectorization_part_project.q.out ql/src/test/results/clientpositive/llap/vectorization_part_project.q.out
index 89c140e28f..dd1f20f3df 100644
--- ql/src/test/results/clientpositive/llap/vectorization_part_project.q.out
+++ ql/src/test/results/clientpositive/llap/vectorization_part_project.q.out
@@ -70,7 +70,7 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: alltypesorc_part
- Statistics: Num rows: 200 Data size: 1592 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 200 Data size: 1600 Basic stats: COMPLETE Column stats: COMPLETE
 Select Operator
 expressions: (cdouble + 2.0D) (type: double)
 outputColumnNames: _col0
@@ -133,13 +133,13 @@ POSTHOOK: Input: default@alltypesorc_part
 POSTHOOK: Input: default@alltypesorc_part@ds=2011
 POSTHOOK: Input: default@alltypesorc_part@ds=2012
 #### A masked pattern was here ####
-NULL
-NULL
--15863.0
--15863.0
--14988.0
--14988.0
--14646.0
--14646.0
--14236.0
--14236.0
+-15990.0
+-15990.0
+-15918.0
+-15918.0
+-15890.0
+-15890.0
+-14305.0
+-14305.0
+-12514.0
+-12514.0
diff --git ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out
index 2c54b1d612..a59a586144 100644
--- ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out
+++ ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out
@@ -1242,56 +1242,56 @@ LIMIT 50
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc
 #### A masked pattern was here ####
-NULL -7196.0 1969-12-31 15:59:58.174 NULL false -51 -51.0 1969-12-31 15:59:43.64 -7196 -1339164819 4992406445232 NULL NULL 7196 -14392 -7196 NULL NULL 51.0 6.4051596E8 -5.157308006568995E-5 51 -1.5598627
-NULL -7196.0 1969-12-31 15:59:58.174 NULL false -52 -52.0 1969-12-31 15:59:45.978 -7196 -2128720310 7935869315680 NULL NULL 7196 -14392 -7196 NULL NULL 52.0 6.4051596E8 -5.258431692972308E-5 52 -1.5298654
-NULL -7196.0 1969-12-31 15:59:58.174 NULL false -52 -52.0 1969-12-31 15:59:47.15 -7196 628698169 -2343786774032 NULL NULL 7196 -14392 -7196 NULL NULL 52.0 6.4051596E8 -5.258431692972308E-5 52 -1.5298654
-NULL -7196.0 1969-12-31 15:59:58.174 NULL false -52 -52.0 1969-12-31 15:59:57.86 -7196 -26309289 98081029392 NULL NULL 7196 -14392 -7196 NULL NULL 52.0 6.4051596E8 -5.258431692972308E-5 52 -1.5298654
-NULL -7196.0 1969-12-31 15:59:58.174 NULL false -52 -52.0 1969-12-31 15:59:58.479 -7196 -1379694191 5143499944048 NULL NULL 7196 -14392 -7196 NULL NULL 52.0 6.4051596E8 -5.258431692972308E-5 52 -1.5298654
-NULL -7196.0 1969-12-31 15:59:58.174 NULL false -52 -52.0 1969-12-31 16:00:03.963 -7196 95444104 -355815619712 NULL NULL 7196 -14392 -7196 NULL NULL 52.0 6.4051596E8 -5.258431692972308E-5 52 -1.5298654
-NULL -7196.0 1969-12-31 15:59:58.174 NULL false -52 -52.0 1969-12-31 16:00:04.518 -7196 -1658319459 6182214943152 NULL NULL 7196 -14392 -7196 NULL NULL 52.0 6.4051596E8 -5.258431692972308E-5 52 -1.5298654
-NULL -7196.0 1969-12-31 15:59:58.174 NULL false -53 -53.0 1969-12-31 15:59:48.882 -7196 -1560660031 5818140595568 NULL NULL 7196 -14392 -7196 NULL NULL 53.0 6.4051596E8 -5.359555379375622E-5 53 -1.5010000
-NULL -7196.0 1969-12-31 15:59:58.174 NULL false -53 -53.0 1969-12-31 15:59:57.663 -7196 898472381 -3349505036368 NULL NULL 7196 -14392 -7196 NULL NULL 53.0 6.4051596E8 -5.359555379375622E-5 53 -1.5010000
-NULL -7196.0 1969-12-31 15:59:58.174 NULL false -53 -53.0 1969-12-31 16:00:11.36 -7196 -1357789899 5061840743472 NULL NULL 7196 -14392 -7196 NULL NULL 53.0 6.4051596E8 -5.359555379375622E-5 53 -1.5010000
-NULL -7196.0 1969-12-31 15:59:58.174 NULL false -54 -54.0 1969-12-31 15:59:53.657 -7196 1476582815 -5504700734320 NULL NULL 7196 -14392 -7196 NULL NULL 54.0 6.4051596E8 -5.4606790657789354E-5 54 -1.4732037
-NULL -7196.0 1969-12-31 15:59:58.174 NULL false -54 -54.0 1969-12-31 16:00:05.688 -7196 1614836149 -6020109163472 NULL NULL 7196 -14392 -7196 NULL NULL 54.0 6.4051596E8 -5.4606790657789354E-5 54 -1.4732037
-NULL -7196.0 1969-12-31 15:59:58.174 NULL false -54 -54.0 1969-12-31 16:00:06.484 -7196 1605976008 -5987078557824 NULL NULL 7196 -14392 -7196 NULL NULL 54.0 6.4051596E8 -5.4606790657789354E-5 54 -1.4732037
-NULL -7196.0 1969-12-31
15:59:58.174 NULL false -54 -54.0 1969-12-31 16:00:11.198 -7196 1650677402 -6153725354656 NULL NULL 7196 -14392 -7196 NULL NULL 54.0 6.4051596E8 -5.4606790657789354E-5 54 -1.4732037 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -55 -55.0 1969-12-31 15:59:43.932 -7196 1982381637 -7390318742736 NULL NULL 7196 -14392 -7196 NULL NULL 55.0 6.4051596E8 -5.561802752182249E-5 55 -1.4464182 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -55 -55.0 1969-12-31 16:00:01.138 -7196 888532643 -3312449693104 NULL NULL 7196 -14392 -7196 NULL NULL 55.0 6.4051596E8 -5.561802752182249E-5 55 -1.4464182 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -55 -55.0 1969-12-31 16:00:13.249 -7196 -685064281 2553919639568 NULL NULL 7196 -14392 -7196 NULL NULL 55.0 6.4051596E8 -5.561802752182249E-5 55 -1.4464182 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -56 -56.0 1969-12-31 16:00:02.298 -7196 -1509994296 5629258735488 NULL NULL 7196 -14392 -7196 NULL NULL 56.0 6.4051596E8 -5.6629264385855625E-5 56 -1.4205893 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -57 -57.0 1969-12-31 15:59:44.539 -7196 1839592407 -6858000493296 NULL NULL 7196 -14392 -7196 NULL NULL 57.0 6.4051596E8 -5.764050124988876E-5 57 -1.3956667 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -57 -57.0 1969-12-31 16:00:04.659 -7196 -1579093262 5886859680736 NULL NULL 7196 -14392 -7196 NULL NULL 57.0 6.4051596E8 -5.764050124988876E-5 57 -1.3956667 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -57 -57.0 1969-12-31 16:00:05.5 -7196 2042351711 -7613887178608 NULL NULL 7196 -14392 -7196 NULL NULL 57.0 6.4051596E8 -5.764050124988876E-5 57 -1.3956667 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -57 -57.0 1969-12-31 16:00:12.626 -7196 248308622 -925694542816 NULL NULL 7196 -14392 -7196 NULL NULL 57.0 6.4051596E8 -5.764050124988876E-5 57 -1.3956667 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -58 -58.0 1969-12-31 15:59:47.859 -7196 -1770443874 6600214762272 NULL NULL 7196 -14392 -7196 NULL NULL 58.0 6.4051596E8 -5.86517381139219E-5 58 -1.3716034 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -58 -58.0 1969-12-31 15:59:55.857 -7196 -825174557 3076250748496 NULL NULL 7196 -14392 -7196 NULL NULL 58.0 6.4051596E8 -5.86517381139219E-5 58 -1.3716034 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -58 -58.0 1969-12-31 16:00:12.065 -7196 1257970504 -4689714038912 NULL NULL 7196 -14392 -7196 NULL NULL 58.0 6.4051596E8 -5.86517381139219E-5 58 -1.3716034 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -59 -59.0 1969-12-31 16:00:13.15 -7196 -1604890000 5983029920000 NULL NULL 7196 -14392 -7196 NULL NULL 59.0 6.4051596E8 -5.966297497795504E-5 59 -1.3483559 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -60 -60.0 1969-12-31 15:59:45.385 -7196 1775867066 -6620432422048 NULL NULL 7196 -14392 -7196 NULL NULL 60.0 6.4051596E8 -6.0674211841988174E-5 60 -1.3258833 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -60 -60.0 1969-12-31 15:59:52.408 -7196 1516314750 -5652821388000 NULL NULL 7196 -14392 -7196 NULL NULL 60.0 6.4051596E8 -6.0674211841988174E-5 60 -1.3258833 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -60 -60.0 1969-12-31 15:59:55.806 -7196 -1802243330 6718763134240 NULL NULL 7196 -14392 -7196 NULL NULL 60.0 6.4051596E8 -6.0674211841988174E-5 60 -1.3258833 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -60 -60.0 1969-12-31 16:00:10.618 -7196 -68838726 256630770528 NULL NULL 7196 -14392 -7196 NULL NULL 60.0 6.4051596E8 -6.0674211841988174E-5 60 -1.3258833 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -61 -61.0 1969-12-31 15:59:44.823 
-7196 NULL NULL NULL NULL 7196 -14392 -7196 NULL NULL 61.0 6.4051596E8 -6.16854487060213E-5 61 -1.3041475 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -61 -61.0 1969-12-31 15:59:48.035 -7196 1237548317 -4613580125776 NULL NULL 7196 -14392 -7196 NULL NULL 61.0 6.4051596E8 -6.16854487060213E-5 61 -1.3041475 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -61 -61.0 1969-12-31 16:00:03.049 -7196 -1513172815 5641108254320 NULL NULL 7196 -14392 -7196 NULL NULL 61.0 6.4051596E8 -6.16854487060213E-5 61 -1.3041475 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -61 -61.0 1969-12-31 16:00:06.848 -7196 1415466231 -5276858109168 NULL NULL 7196 -14392 -7196 NULL NULL 61.0 6.4051596E8 -6.16854487060213E-5 61 -1.3041475 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -61 -61.0 1969-12-31 16:00:11.842 -7196 NULL NULL NULL NULL 7196 -14392 -7196 NULL NULL 61.0 6.4051596E8 -6.16854487060213E-5 61 -1.3041475 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -61 -61.0 1969-12-31 16:00:12.454 -7196 -2175533 8110387024 NULL NULL 7196 -14392 -7196 NULL NULL 61.0 6.4051596E8 -6.16854487060213E-5 61 -1.3041475 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -61 -61.0 1969-12-31 16:00:14.192 -7196 -2114172148 7881633767744 NULL NULL 7196 -14392 -7196 NULL NULL 61.0 6.4051596E8 -6.16854487060213E-5 61 -1.3041475 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -62 -62.0 1969-12-31 15:59:58.395 -7196 -1367753794 5098986144032 NULL NULL 7196 -14392 -7196 NULL NULL 62.0 6.4051596E8 -6.269668557005445E-5 62 -1.2831129 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -62 -62.0 1969-12-31 16:00:01.22 -7196 1670449519 -6227435806832 NULL NULL 7196 -14392 -7196 NULL NULL 62.0 6.4051596E8 -6.269668557005445E-5 62 -1.2831129 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -62 -62.0 1969-12-31 16:00:02.373 -7196 NULL NULL NULL NULL 7196 -14392 -7196 NULL NULL 62.0 6.4051596E8 -6.269668557005445E-5 62 -1.2831129 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -62 -62.0 1969-12-31 16:00:03.85 -7196 -642836823 2396495676144 NULL NULL 7196 -14392 -7196 NULL NULL 62.0 6.4051596E8 -6.269668557005445E-5 62 -1.2831129 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -62 -62.0 1969-12-31 16:00:09.025 -7196 -840223244 3132352253632 NULL NULL 7196 -14392 -7196 NULL NULL 62.0 6.4051596E8 -6.269668557005445E-5 62 -1.2831129 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -62 -62.0 1969-12-31 16:00:12.388 -7196 NULL NULL NULL NULL 7196 -14392 -7196 NULL NULL 62.0 6.4051596E8 -6.269668557005445E-5 62 -1.2831129 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -63 -63.0 1969-12-31 16:00:03.552 -7196 -1224023895 4563161080560 NULL NULL 7196 -14392 -7196 NULL NULL 63.0 6.4051596E8 -6.370792243408759E-5 63 -1.2627460 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -63 -63.0 1969-12-31 16:00:07.375 -7196 -1711796768 6381578351104 NULL NULL 7196 -14392 -7196 NULL NULL 63.0 6.4051596E8 -6.370792243408759E-5 63 -1.2627460 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -63 -63.0 1969-12-31 16:00:11.946 -7196 -994504916 3707514326848 NULL NULL 7196 -14392 -7196 NULL NULL 63.0 6.4051596E8 -6.370792243408759E-5 63 -1.2627460 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -64 -64.0 1969-12-31 15:59:56.048 -7196 406535485 -1515564288080 NULL NULL 7196 -14392 -7196 NULL NULL 64.0 6.4051596E8 -6.471915929812072E-5 64 -1.2430156 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -64 -64.0 1969-12-31 16:00:01.785 -7196 -1639157869 6110780535632 NULL NULL 7196 -14392 -7196 NULL NULL 64.0 6.4051596E8 -6.471915929812072E-5 64 -1.2430156 -NULL 
-7196.0 1969-12-31 15:59:58.174 NULL false -64 -64.0 1969-12-31 16:00:11.912 -7196 -1615920595 6024151978160 NULL NULL 7196 -14392 -7196 NULL NULL 64.0 6.4051596E8 -6.471915929812072E-5 64 -1.2430156 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -64 -64.0 1969-12-31 16:00:12.339 -7196 1805860756 -6732248898368 NULL NULL 7196 -14392 -7196 NULL NULL 64.0 6.4051596E8 -6.471915929812072E-5 64 -1.2430156 +-1000804087 NULL NULL H8LCu4M2u4f1S true -51 -51.0 1969-12-31 16:00:08.451 NULL -873515594 3256466134432 1000804087 1000803223.743 NULL NULL NULL 1.0 1000803250.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-1003789565 NULL NULL dq1Ji5vGb4GVow42 false -51 -51.0 1969-12-31 16:00:08.451 NULL -505400643 1884133597104 1003789565 1003788701.743 NULL NULL NULL 1.0 1003788728.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-1012011232 NULL NULL 7q0iMi2GDq0Q false 11 11.0 1969-12-31 16:00:02.351 NULL -806973080 3008395642240 1012011232 1012010368.743 NULL NULL NULL 1.0 1012010395.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1015510885 NULL NULL Kw7fOuw4DHeyXe2yg false -51 -51.0 1969-12-31 16:00:08.451 NULL -67812054 252803337312 1015510885 1015510021.743 NULL NULL NULL 1.0 1015510048.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-1016835101 NULL NULL Md2lY0T7reBu false 8 8.0 1969-12-31 16:00:15.892 NULL -491294009 1831544065552 1016835101 1016834237.743 NULL NULL NULL 1.0 1016834264.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1017266554 NULL NULL DU1m68i1Q7W3 false -51 -51.0 1969-12-31 16:00:08.451 NULL -145067516 540811699648 1017266554 1017265690.743 NULL NULL NULL 1.0 1017265717.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-1020120834 NULL NULL 6Ob80MBP350rI275 true 8 8.0 1969-12-31 16:00:15.892 NULL -100465694 374536107232 1020120834 1020119970.743 NULL NULL NULL 1.0 1020119997.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1020466796 NULL NULL 7hCJ5yJvt0775jjgq8S0bX6W false 11 11.0 1969-12-31 16:00:02.351 NULL -926772952 3455009565056 1020466796 1020465932.743 NULL NULL NULL 1.0 1020465959.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1023165277 NULL NULL 438Lxo541TwY5ID80cnR5 false 11 11.0 1969-12-31 16:00:02.351 NULL -1004780673 3745822348944 1023165277 1023164413.743 NULL NULL NULL 1.0 1023164440.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1023644243 NULL NULL Cxas82oA2hX884xmYQ2jrpDX true 11 11.0 1969-12-31 16:00:02.351 NULL -866431241 3230055666448 1023644243 1023643379.743 NULL NULL NULL 1.0 1023643406.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1024321144 NULL NULL CE22Wjuk7d20ouN true 8 8.0 1969-12-31 16:00:15.892 NULL -94624654 352760710112 1024321144 1024320280.743 NULL NULL NULL 1.0 1024320307.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1026019772 NULL NULL T6Al7d0hN770XB65M0F2g true 11 11.0 1969-12-31 16:00:02.351 NULL -338489479 1261888777712 1026019772 1026018908.743 NULL NULL NULL 1.0 1026018935.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1039292315 NULL NULL 07488p5vb4d2 true 8 8.0 1969-12-31 16:00:15.892 NULL -432155916 1611077254848 1039292315 1039291451.743 NULL NULL NULL 1.0 1039291478.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1039495786 NULL NULL b0BEyNEe1bvQ true 8 8.0 1969-12-31 16:00:15.892 NULL -760564106 2835382987168 1039495786 1039494922.743 NULL NULL NULL 1.0 1039494949.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1039715238 NULL NULL oOt2v true -51 -51.0 1969-12-31 16:00:08.451 NULL -86361999 321957532272 1039715238 1039714374.743 NULL NULL NULL 1.0 1039714401.023 51.0 NULL 
-5.157308006568995E-5 51 -1.5598627 +-1041353707 NULL NULL 25Qky6lf2pt5FP47Mqmb true 11 11.0 1969-12-31 16:00:02.351 NULL -931949639 3474308254192 1041353707 1041352843.743 NULL NULL NULL 1.0 1041352870.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1043979188 NULL NULL 2d3tQdCGQN5k7u7S false 11 11.0 1969-12-31 16:00:02.351 NULL -8894336 33158084608 1043979188 1043978324.743 NULL NULL NULL 1.0 1043978351.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1046913669 NULL NULL 40r4yyU6T0A0Mekf24k false 8 8.0 1969-12-31 16:00:15.892 NULL -90393132 336985596096 1046913669 1046912805.743 NULL NULL NULL 1.0 1046912832.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1047036113 NULL NULL Js07yFa2qnrfVU1j2e3 false 11 11.0 1969-12-31 16:00:02.351 NULL -240113848 895144425344 1047036113 1047035249.743 NULL NULL NULL 1.0 1047035276.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1048097158 NULL NULL fpt3gpLE true 11 11.0 1969-12-31 16:00:02.351 NULL -234579722 874513203616 1048097158 1048096294.743 NULL NULL NULL 1.0 1048096321.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1049984461 NULL NULL qUY8Rl34NWRg false 8 8.0 1969-12-31 16:00:15.892 NULL -247067895 921069112560 1049984461 1049983597.743 NULL NULL NULL 1.0 1049983624.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1054849160 NULL NULL CEGOy true 11 11.0 1969-12-31 16:00:02.351 NULL -1027630923 3831008080944 1054849160 1054848296.743 NULL NULL NULL 1.0 1054848323.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1055185482 NULL NULL l20vn2Awc true 11 11.0 1969-12-31 16:00:02.351 NULL -398806473 1486750531344 1055185482 1055184618.743 NULL NULL NULL 1.0 1055184645.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1058286942 NULL NULL R6q656btrqQM6a5nQ4GcVg true 8 8.0 1969-12-31 16:00:15.892 NULL -922041114 3437369272992 1058286942 1058286078.743 NULL NULL NULL 1.0 1058286105.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1058897881 NULL NULL 6fPk0A false 8 8.0 1969-12-31 16:00:15.892 NULL -800997317 2986117997776 1058897881 1058897017.743 NULL NULL NULL 1.0 1058897044.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1060624784 NULL NULL Das7E73 true -51 -51.0 1969-12-31 16:00:08.451 NULL -941434751 3509668751728 1060624784 1060623920.743 NULL NULL NULL 1.0 1060623947.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-1063164541 NULL NULL 1NydRD5y5o3 false 8 8.0 1969-12-31 16:00:15.892 NULL -74907656 279255741568 1063164541 1063163677.743 NULL NULL NULL 1.0 1063163704.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1063745167 NULL NULL L47nqo true 8 8.0 1969-12-31 16:00:15.892 NULL -68741114 256266872992 1063745167 1063744303.743 NULL NULL NULL 1.0 1063744330.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1068247011 NULL NULL dPbX4jd1v47r1bB6506si false 8 8.0 1969-12-31 16:00:15.892 NULL -729456614 2719414256992 1068247011 1068246147.743 NULL NULL NULL 1.0 1068246174.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1069103950 NULL NULL 41A0nYX72UOSfxO4053xy true 11 11.0 1969-12-31 16:00:02.351 NULL -927759444 3458687207232 1069103950 1069103086.743 NULL NULL NULL 1.0 1069103113.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1069736047 NULL NULL k17Am8uPHWk02cEf1jet true 11 11.0 1969-12-31 16:00:02.351 NULL -453772520 1691663954560 1069736047 1069735183.743 NULL NULL NULL 1.0 1069735210.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-919940926 NULL NULL i1P3Wlat5EnBugL24oS4I3 true -51 -51.0 1969-12-31 16:00:08.451 NULL -533395388 1988498006464 919940926 919940062.743 NULL NULL NULL 1.0 919940089.023 
51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-923400421 NULL NULL MJ7Ej4tBYS8l2mK true 8 8.0 1969-12-31 16:00:15.892 NULL -67708318 252416609504 923400421 923399557.743 NULL NULL NULL 1.0 923399584.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-925336063 NULL NULL 060EnWLmWE4K8Pv false -51 -51.0 1969-12-31 16:00:08.451 NULL -477173411 1778902476208 925336063 925335199.743 NULL NULL NULL 1.0 925335226.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-928500968 NULL NULL 34oSgU32X true 8 8.0 1969-12-31 16:00:15.892 NULL -831143834 3098504213152 928500968 928500104.743 NULL NULL NULL 1.0 928500131.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-930153712 NULL NULL Jj21024T2xdn6 false 11 11.0 1969-12-31 16:00:02.351 NULL -737116859 2747971650352 930153712 930152848.743 NULL NULL NULL 1.0 930152875.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-930463965 NULL NULL ldk1K false 11 11.0 1969-12-31 16:00:02.351 NULL -414014176 1543444848128 930463965 930463101.743 NULL NULL NULL 1.0 930463128.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-932998902 NULL NULL kAr0ffWGEU7MHSKp true 8 8.0 1969-12-31 16:00:15.892 NULL -230462122 859162790816 932998902 932998038.743 NULL NULL NULL 1.0 932998065.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-937557606 NULL NULL 2251WSv5eA2l6WqesdKPM2 true 8 8.0 1969-12-31 16:00:15.892 NULL -532708003 1985935435184 937557606 937556742.743 NULL NULL NULL 1.0 937556769.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-943342622 NULL NULL 3w6XYq04J0Lb3Sv82eOV2HJ true -51 -51.0 1969-12-31 16:00:08.451 NULL -750731096 2798725525888 943342622 943341758.743 NULL NULL NULL 1.0 943341785.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-949286785 NULL NULL XWuYuk5qpn5Khs3764E56 true -51 -51.0 1969-12-31 16:00:08.451 NULL -946341072 3527959516416 949286785 949285921.743 NULL NULL NULL 1.0 949285948.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-954917203 NULL NULL 1M4eTm8OcOW2dAMV2V5slS1 true -51 -51.0 1969-12-31 16:00:08.451 NULL -710267209 2647876155152 954917203 954916339.743 NULL NULL NULL 1.0 954916366.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-965597463 NULL NULL b0G65a66732y6yE65hQ0 false 8 8.0 1969-12-31 16:00:15.892 NULL -922745115 3439993788720 965597463 965596599.743 NULL NULL NULL 1.0 965596626.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-970640948 NULL NULL frhe0 false 11 11.0 1969-12-31 16:00:02.351 NULL -935612665 3487964015120 970640948 970640084.743 NULL NULL NULL 1.0 970640111.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-970918963 NULL NULL suoqdh false -51 -51.0 1969-12-31 16:00:08.451 NULL -588508542 2193959844576 970918963 970918099.743 NULL NULL NULL 1.0 970918126.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-978898374 NULL NULL ShA4jlmOwF8u7kjN false 11 11.0 1969-12-31 16:00:02.351 NULL -277483031 1034456739568 978898374 978897510.743 NULL NULL NULL 1.0 978897537.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-980072140 NULL NULL Jt7E0sR3X7V true -51 -51.0 1969-12-31 16:00:08.451 NULL -819889345 3056547478160 980072140 980071276.743 NULL NULL NULL 1.0 980071303.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-980511555 NULL NULL 1TBB2v0eBqlr4c7d true 8 8.0 1969-12-31 16:00:15.892 NULL -890261594 3318895222432 980511555 980510691.743 NULL NULL NULL 1.0 980510718.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-988289401 NULL NULL CeG187j false 11 11.0 1969-12-31 16:00:02.351 NULL -446065499 1662932180272 988289401 988288537.743 NULL NULL NULL 1.0 988288564.023 -11.0 NULL 1.1123605504364498E-5 -11 
7.2320909 +-993291633 NULL NULL 8reJCOg48gHGHDs true 8 8.0 1969-12-31 16:00:15.892 NULL -861531376 3211788969728 993291633 993290769.743 NULL NULL NULL 1.0 993290796.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cint, cbigint, @@ -1785,81 +1785,81 @@ LIMIT 75 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL NULL NULL 1969-12-31 15:59:58.456 15601.0 -44.0 -1416000760 15601 NULL NULL -1416000716 1416000760 44.0 -2832001476 1.0 -15601.0 NULL -1.416016361E9 0.0315682 -7197.0 -15601 NULL -NULL NULL NULL 1969-12-31 15:59:58.456 15601.0 -48.0 -1683400285 15601 NULL NULL -1683400237 1683400285 48.0 -3366800522 1.0 -15601.0 NULL -1.683415886E9 0.0289375 -5582.0 -15601 NULL -NULL NULL NULL 1969-12-31 15:59:58.456 15601.0 -57.0 -1057361026 15601 NULL NULL -1057360969 1057361026 57.0 -2114721995 1.0 -15601.0 NULL -1.057376627E9 0.0243684 -3251.0 -15601 NULL -NULL NULL NULL 1969-12-31 15:59:58.456 15601.0 -62.0 -1726415169 15601 NULL NULL -1726415107 1726415169 62.0 -3452830276 1.0 -15601.0 NULL -1.72643077E9 0.0224032 -8509.0 -15601 NULL -NULL NULL NULL 1969-12-31 15:59:58.456 15601.0 -63.0 -1167054574 15601 NULL NULL -1167054511 1167054574 63.0 -2334109085 1.0 -15601.0 NULL -1.167070175E9 0.0220476 -6168.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -44.0 -1551649760 15601 NULL NULL -1551649716 1551649760 44.0 -3103299476 1.0 -15601.0 NULL -1.551665361E9 0.0315682 -5502.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -45.0 -1022657523 15601 NULL NULL -1022657478 1022657523 45.0 -2045315001 1.0 -15601.0 NULL -1.022673124E9 0.0308667 -11973.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -45.0 -1291025659 15601 NULL NULL -1291025614 1291025659 45.0 -2582051273 1.0 -15601.0 NULL -1.29104126E9 0.0308667 -11707.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -45.0 -831227593 15601 NULL NULL -831227548 831227593 45.0 -1662455141 1.0 -15601.0 NULL -8.31243194E8 0.0308667 -6313.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -46.0 -208932264 15601 NULL NULL -208932218 208932264 46.0 -417864482 1.0 -15601.0 NULL -2.08947865E8 0.0301957 -3672.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -46.0 -468932050 15601 NULL NULL -468932004 468932050 46.0 -937864054 1.0 -15601.0 NULL -4.68947651E8 0.0301957 -12793.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -47.0 -436916225 15601 NULL NULL -436916178 436916225 47.0 -873832403 1.0 -15601.0 NULL -4.36931826E8 0.0295532 -10220.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -47.0 -493471535 15601 NULL NULL -493471488 493471535 47.0 -986943023 1.0 -15601.0 NULL -4.93487136E8 0.0295532 -11905.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -48.0 -1228417392 15601 NULL NULL -1228417344 1228417392 48.0 -2456834736 1.0 -15601.0 NULL -1.228432993E9 0.0289375 -10253.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -48.0 -1294837001 15601 NULL NULL -1294836953 1294837001 48.0 -2589673954 1.0 -15601.0 NULL -1.294852602E9 0.0289375 -804.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -48.0 -1427685796 15601 NULL NULL -1427685748 1427685796 48.0 -2855371544 1.0 -15601.0 NULL -1.427701397E9 0.0289375 -7084.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -48.0 -803222928 15601 NULL NULL -803222880 803222928 48.0 -1606445808 1.0 -15601.0 NULL -8.03238529E8 0.0289375 -5443.0 -15601 NULL -NULL NULL 
false 1969-12-31 15:59:58.456 15601.0 -49.0 -1841324115 15601 NULL NULL -1841324066 1841324115 49.0 -3682648181 1.0 -15601.0 NULL -1.841339716E9 0.0283469 -489.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -49.0 -230127703 15601 NULL NULL -230127654 230127703 49.0 -460255357 1.0 -15601.0 NULL -2.30143304E8 0.0283469 -12953.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -50.0 -596103241 15601 NULL NULL -596103191 596103241 50.0 -1192206432 1.0 -15601.0 NULL -5.96118842E8 0.0277800 -4632.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -51.0 -546830045 15601 NULL NULL -546829994 546830045 51.0 -1093660039 1.0 -15601.0 NULL -5.46845646E8 0.0272353 -14995.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -52.0 -2097289702 15601 NULL NULL -2097289650 2097289702 52.0 -4194579352 1.0 -15601.0 NULL -2.097305303E9 0.0267115 -469.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -52.0 -886068046 15601 NULL NULL -886067994 886068046 52.0 -1772136040 1.0 -15601.0 NULL -8.86083647E8 0.0267115 -9251.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -54.0 -1114169807 15601 NULL NULL -1114169753 1114169807 54.0 -2228339560 1.0 -15601.0 NULL -1.114185408E9 0.0257222 -8791.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -54.0 -1754189160 15601 NULL NULL -1754189106 1754189160 54.0 -3508378266 1.0 -15601.0 NULL -1.754204761E9 0.0257222 -12720.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -54.0 -989710558 15601 NULL NULL -989710504 989710558 54.0 -1979421062 1.0 -15601.0 NULL -9.89726159E8 0.0257222 -14320.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -56.0 -1105322173 15601 NULL NULL -1105322117 1105322173 56.0 -2210644290 1.0 -15601.0 NULL -1.105337774E9 0.0248036 -6924.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -56.0 -1466363382 15601 NULL NULL -1466363326 1466363382 56.0 -2932726708 1.0 -15601.0 NULL -1.466378983E9 0.0248036 -9791.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -56.0 -865054294 15601 NULL NULL -865054238 865054294 56.0 -1730108532 1.0 -15601.0 NULL -8.65069895E8 0.0248036 -10046.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -57.0 -1698345590 15601 NULL NULL -1698345533 1698345590 57.0 -3396691123 1.0 -15601.0 NULL -1.698361191E9 0.0243684 -5129.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -57.0 -2123576095 15601 NULL NULL -2123576038 2123576095 57.0 -4247152133 1.0 -15601.0 NULL -2.123591696E9 0.0243684 -14778.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -57.0 -304247740 15601 NULL NULL -304247683 304247740 57.0 -608495423 1.0 -15601.0 NULL -3.04263341E8 0.0243684 -12639.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -57.0 -365505703 15601 NULL NULL -365505646 365505703 57.0 -731011349 1.0 -15601.0 NULL -3.65521304E8 0.0243684 -5475.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -59.0 -2021724111 15601 NULL NULL -2021724052 2021724111 59.0 -4043448163 1.0 -15601.0 NULL -2.021739712E9 0.0235424 -6122.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -60.0 -1016256928 15601 NULL NULL -1016256868 1016256928 60.0 -2032513796 1.0 -15601.0 NULL -1.016272529E9 0.0231500 -7788.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -60.0 -1743144280 15601 NULL NULL -1743144220 1743144280 60.0 -3486288500 1.0 -15601.0 NULL -1.743159881E9 0.0231500 -13348.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -60.0 
-519753851 15601 NULL NULL -519753791 519753851 60.0 -1039507642 1.0 -15601.0 NULL -5.19769452E8 0.0231500 -6536.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -60.0 -5953872 15601 NULL NULL -5953812 5953872 60.0 -11907684 1.0 -15601.0 NULL -5969473.0 0.0231500 -9891.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -61.0 -982179838 15601 NULL NULL -982179777 982179838 61.0 -1964359615 1.0 -15601.0 NULL -9.82195439E8 0.0227705 -3282.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -63.0 -1574729892 15601 NULL NULL -1574729829 1574729892 63.0 -3149459721 1.0 -15601.0 NULL -1.574745493E9 0.0220476 -11755.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -63.0 -1996001975 15601 NULL NULL -1996001912 1996001975 63.0 -3992003887 1.0 -15601.0 NULL -1.996017576E9 0.0220476 -10035.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -44.0 -1447719201 15601 NULL NULL -1447719157 1447719201 44.0 -2895438358 1.0 -15601.0 NULL -1.447734802E9 0.0315682 -8805.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -47.0 -1828371599 15601 NULL NULL -1828371552 1828371599 47.0 -3656743151 1.0 -15601.0 NULL -1.8283872E9 0.0295532 -12404.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -48.0 -1465907371 15601 NULL NULL -1465907323 1465907371 48.0 -2931814694 1.0 -15601.0 NULL -1.465922972E9 0.0289375 -6209.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -48.0 -1666377780 15601 NULL NULL -1666377732 1666377780 48.0 -3332755512 1.0 -15601.0 NULL -1.666393381E9 0.0289375 -3768.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -48.0 -652336471 15601 NULL NULL -652336423 652336471 48.0 -1304672894 1.0 -15601.0 NULL -6.52352072E8 0.0289375 -11858.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -49.0 -197652849 15601 NULL NULL -197652800 197652849 49.0 -395305649 1.0 -15601.0 NULL -1.9766845E8 0.0283469 -3780.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -49.0 -211726367 15601 NULL NULL -211726318 211726367 49.0 -423452685 1.0 -15601.0 NULL -2.11741968E8 0.0283469 -5196.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -49.0 -57200424 15601 NULL NULL -57200375 57200424 49.0 -114400799 1.0 -15601.0 NULL -5.7216025E7 0.0283469 -7158.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -49.0 -668597606 15601 NULL NULL -668597557 668597606 49.0 -1337195163 1.0 -15601.0 NULL -6.68613207E8 0.0283469 -1150.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -49.0 -990904667 15601 NULL NULL -990904618 990904667 49.0 -1981809285 1.0 -15601.0 NULL -9.90920268E8 0.0283469 -7152.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -50.0 -458110015 15601 NULL NULL -458109965 458110015 50.0 -916219980 1.0 -15601.0 NULL -4.58125616E8 0.0277800 -2251.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -52.0 -2074134645 15601 NULL NULL -2074134593 2074134645 52.0 -4148269238 1.0 -15601.0 NULL -2.074150246E9 0.0267115 -12897.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -54.0 -1795674990 15601 NULL NULL -1795674936 1795674990 54.0 -3591349926 1.0 -15601.0 NULL -1.795690591E9 0.0257222 -15491.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -54.0 -1984659810 15601 NULL NULL -1984659756 1984659810 54.0 -3969319566 1.0 -15601.0 NULL -1.984675411E9 0.0257222 -9797.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -54.0 -641670659 15601 NULL NULL -641670605 641670659 54.0 -1283341264 1.0 -15601.0 NULL 
-6.4168626E8 0.0257222 -1529.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -55.0 -1062767051 15601 NULL NULL -1062766996 1062767051 55.0 -2125534047 1.0 -15601.0 NULL -1.062782652E9 0.0252545 -11330.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -55.0 -1338667765 15601 NULL NULL -1338667710 1338667765 55.0 -2677335475 1.0 -15601.0 NULL -1.338683366E9 0.0252545 -8359.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -55.0 -1483320156 15601 NULL NULL -1483320101 1483320156 55.0 -2966640257 1.0 -15601.0 NULL -1.483335757E9 0.0252545 -8278.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -56.0 -1683701844 15601 NULL NULL -1683701788 1683701844 56.0 -3367403632 1.0 -15601.0 NULL -1.683717445E9 0.0248036 -10722.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -56.0 -971846497 15601 NULL NULL -971846441 971846497 56.0 -1943692938 1.0 -15601.0 NULL -9.71862098E8 0.0248036 -13404.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -57.0 -585350546 15601 NULL NULL -585350489 585350546 57.0 -1170701035 1.0 -15601.0 NULL -5.85366147E8 0.0243684 -1026.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -58.0 -1072335429 15601 NULL NULL -1072335371 1072335429 58.0 -2144670800 1.0 -15601.0 NULL -1.07235103E9 0.0239483 -694.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -58.0 -1560616588 15601 NULL NULL -1560616530 1560616588 58.0 -3121233118 1.0 -15601.0 NULL -1.560632189E9 0.0239483 -1755.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -59.0 -1315413812 15601 NULL NULL -1315413753 1315413812 59.0 -2630827565 1.0 -15601.0 NULL -1.315429413E9 0.0235424 -15497.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -59.0 -133287350 15601 NULL NULL -133287291 133287350 59.0 -266574641 1.0 -15601.0 NULL -1.33302951E8 0.0235424 -8007.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -60.0 -2041965187 15601 NULL NULL -2041965127 2041965187 60.0 -4083930314 1.0 -15601.0 NULL -2.041980788E9 0.0231500 -12701.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -60.0 -903925845 15601 NULL NULL -903925785 903925845 60.0 -1807851630 1.0 -15601.0 NULL -9.03941446E8 0.0231500 -3905.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -61.0 -1022679553 15601 NULL NULL -1022679492 1022679553 61.0 -2045359045 1.0 -15601.0 NULL -1.022695154E9 0.0227705 -2801.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -61.0 -854893578 15601 NULL NULL -854893517 854893578 61.0 -1709787095 1.0 -15601.0 NULL -8.54909179E8 0.0227705 -5581.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -62.0 -1592016120 15601 NULL NULL -1592016058 1592016120 62.0 -3184032178 1.0 -15601.0 NULL -1.592031721E9 0.0224032 -12075.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -62.0 667693308 15601 NULL NULL 667693370 -667693308 62.0 1335386678 1.0 -15601.0 NULL 6.67677707E8 0.0224032 1710.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -63.0 -200542601 15601 NULL NULL -200542538 200542601 63.0 -401085139 1.0 -15601.0 NULL -2.00558202E8 0.0220476 -7347.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -63.0 -721244708 15601 NULL NULL -721244645 721244708 63.0 -1442489353 1.0 -15601.0 NULL -7.21260309E8 0.0220476 -10478.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -64.0 -1809291815 15601 NULL NULL -1809291751 1809291815 64.0 -3618583566 1.0 -15601.0 NULL -1.809307416E9 0.0217031 -12643.0 -15601 NULL +-104148943 tEO4vj3G true 
1969-12-31 15:59:44.53 2248.0 NULL 1864027286 2248 false -104146695 NULL -1864027286 NULL NULL 1.0 -2248.0 194132281226719770 1.864025038E9 NULL 1422.0 -2248 -104144447 +-110450673 uv5m1sFX10 true 1969-12-31 16:00:16.376 -8148.0 NULL 1864027286 -8148 true -110458821 NULL -1864027286 NULL NULL 1.0 8148.0 205898256323389806 1.864035434E9 NULL 1178.0 8148 -110466969 +-128417177 ygkC2e2sUm2036Sd1U8kCG62 true 1969-12-31 16:00:01.936 -8871.0 NULL 1864027286 -8871 false -128426048 NULL -1864027286 NULL NULL 1.0 8871.0 239389657705145728 1.864036157E9 NULL 8411.0 8871 -128434919 +-129248849 w3OO7InLN4ic3M0h8xpvuBMn true 1969-12-31 15:59:48.413 3255.0 NULL 1864027286 3255 true -129245594 NULL -1864027286 NULL NULL 1.0 -3255.0 240917313811277884 1.864024031E9 NULL 2711.0 -3255 -129242339 +-140351494 xh0Qhj80MAcHEMVKx true 1969-12-31 16:00:14.98 -11115.0 NULL 1864027286 -11115 true -140362609 NULL -1864027286 NULL NULL 1.0 11115.0 261639733110149174 1.864038401E9 NULL 8441.0 11115 -140373724 +-198739996 uxnt0fsrBtPD807 true 1969-12-31 16:00:11.528 -14709.0 NULL 1864027286 -14709 false -198754705 NULL -1864027286 NULL NULL 1.0 14709.0 370484193340880630 1.864041995E9 NULL 14552.0 14709 -198769414 +-203191502 wK0N1nX22KSjcTVhDYq true 1969-12-31 15:59:49.907 -6663.0 NULL 1864027286 -6663 true -203198165 NULL -1864027286 NULL NULL 1.0 6663.0 378766924025130190 1.864033949E9 NULL 6395.0 6663 -203204828 +-25028803 x8n40D35c65l true 1969-12-31 15:59:43.775 -4002.0 NULL 1864027286 -4002 true -25032805 NULL -1864027286 NULL NULL 1.0 4002.0 46661831565117230 1.864031288E9 NULL 3740.0 4002 -25036807 +-315135285 y4jD1v2Go true 1969-12-31 15:59:43.97 -4683.0 NULL 1864027286 -4683 false -315139968 NULL -1864027286 NULL NULL 1.0 4683.0 587429499261166848 1.864031969E9 NULL 1283.0 4683 -315144651 +-360475292 uq2hp true 1969-12-31 16:00:10.933 -1007.0 NULL 1864027286 -1007 false -360476299 NULL -1864027286 NULL NULL 1.0 1007.0 671937657292294514 1.864028293E9 NULL 803.0 1007 -360477306 +-362733967 tUi8QYP4S53YPcw true 1969-12-31 16:00:00.003 -7959.0 NULL 1864027286 -7959 true -362741926 NULL -1864027286 NULL NULL 1.0 7959.0 676160847840192836 1.864035245E9 NULL 5609.0 7959 -362749885 +-367195514 t5805L0xlU0YM true 1969-12-31 15:59:43.799 -13339.0 NULL 1864027286 -13339 false -367208853 NULL -1864027286 NULL NULL 1.0 13339.0 684487321652762958 1.864040625E9 NULL 8748.0 13339 -367222192 +-370283300 x0w77gi6iqtTQ1 true 1969-12-31 15:59:44.652 1850.0 NULL 1864027286 1850 true -370281450 NULL -1864027286 NULL NULL 1.0 -1850.0 690214726299644700 1.864025436E9 NULL 586.0 -1850 -370279600 +-372506148 utfrK57P2tp0 true 1969-12-31 16:00:05.326 -12525.0 NULL 1864027286 -12525 false -372518673 NULL -1864027286 NULL NULL 1.0 12525.0 694384971016511478 1.864039811E9 NULL 6686.0 12525 -372531198 +-380733719 t7s5did true NULL -2120.0 NULL 1864027286 -2120 false -380735839 NULL -1864027286 NULL NULL 1.0 2120.0 709701992654102954 1.864029406E9 NULL 326.0 2120 -380737959 +-412772386 uO4aN4J0dKv3717r8fPG true 1969-12-31 16:00:07.824 -11809.0 NULL 1864027286 -11809 true -412784195 NULL -1864027286 NULL NULL 1.0 11809.0 769441002709544770 1.864039095E9 NULL 254.0 11809 -412796004 +-452599200 v4L3dR650oy4O8MPhjc true 1969-12-31 15:59:46.988 8757.0 NULL 1864027286 8757 false -452590443 NULL -1864027286 NULL NULL 1.0 -8757.0 843640935134827698 1.864018529E9 NULL 3509.0 -8757 -452581686 +-459571311 taArL704d542R82qw8 true 1969-12-31 16:00:00.738 -13901.0 NULL 1864027286 -13901 false -459585212 NULL -1864027286 NULL NULL 1.0 13901.0 
856679375410094632 1.864041187E9 NULL 493.0 13901 -459599113 +-487903609 tINcSR1MT3f2P4 true 1969-12-31 16:00:12.099 -9147.0 NULL 1864027286 -9147 false -487912756 NULL -1864027286 NULL NULL 1.0 9147.0 909482690371460216 1.864036433E9 NULL 5891.0 9147 -487921903 +-518918140 ugq0uAy0qXj2D0fX true 1969-12-31 16:00:12.479 5245.0 NULL 1864027286 5245 false -518912895 NULL -1864027286 NULL NULL 1.0 -5245.0 967267795337252970 1.864022041E9 NULL 1491.0 -5245 -518907650 +-520054643 wc4Ae163B5VxG2L true 1969-12-31 16:00:06.693 301.0 NULL 1864027286 301 true -520054342 NULL -1864027286 NULL NULL 1.0 -301.0 969395483690775812 1.864026985E9 NULL 205.0 -301 -520054041 +-520765672 vQalqQ true 1969-12-31 15:59:44.48 -3969.0 NULL 1864027286 -3969 false -520769641 NULL -1864027286 NULL NULL 1.0 3969.0 970728820544424326 1.864031255E9 NULL 2312.0 3969 -520773610 +-532611088 wLWrtVNx188P7uXPV true 1969-12-31 16:00:04.012 -1428.0 NULL 1864027286 -1428 false -532612516 NULL -1864027286 NULL NULL 1.0 1428.0 992804262689111576 1.864028714E9 NULL 338.0 1428 -532613944 +-553779656 weQ0d24K116Y0 true 1969-12-31 16:00:12.009 11147.0 NULL 1864027286 11147 true -553768509 NULL -1864027286 NULL NULL 1.0 -11147.0 1032239610903536574 1.864016139E9 NULL 3652.0 -11147 -553757362 +-601825532 v4gQqo0bxX256o7EEN42lSoU true 1969-12-31 15:59:58.417 11021.0 NULL 1864027286 11021 false -601814511 NULL -1864027286 NULL NULL 1.0 -11021.0 1121798669614747146 1.864016265E9 NULL 1472.0 -11021 -601803490 +-64947310 vvictFVSOgi true 1969-12-31 15:59:48.172 6612.0 NULL 1864027286 6612 false -64940698 NULL -1864027286 NULL NULL 1.0 -6612.0 121051233043885628 1.864020674E9 NULL 5306.0 -6612 -64934086 +-719899789 umNykRkKiih6Cx6K42 true 1969-12-31 15:59:55.878 -10134.0 NULL 1864027286 -10134 true -719909923 NULL -1864027286 NULL NULL 1.0 10134.0 1341931739934158978 1.86403742E9 NULL 9728.0 10134 -719920057 +-758062600 vA0bEQqO50LlKcj7AAR56P63 true 1969-12-31 16:00:16.169 7111.0 NULL 1864027286 7111 false -758055489 NULL -1864027286 NULL NULL 1.0 -7111.0 1413036115798072854 1.864020175E9 NULL 6634.0 -7111 -758048378 +-770958258 uXu1mj3tWs36cGpu4p3aHq true 1969-12-31 15:59:56.944 8059.0 NULL 1864027286 8059 false -770950199 NULL -1864027286 NULL NULL 1.0 -8059.0 1437072207083129914 1.864019227E9 NULL 4763.0 -8059 -770942140 +-778541551 t66fkUkSNP78t2856Lcn true 1969-12-31 16:00:03.35 15678.0 NULL 1864027286 15678 true -778525873 NULL -1864027286 NULL NULL 1.0 -15678.0 1451193470128970678 1.864011608E9 NULL 7154.0 -15678 -778510195 +-804390280 uNJPm true 1969-12-31 16:00:12.321 -10737.0 NULL 1864027286 -10737 true -804401017 NULL -1864027286 NULL NULL 1.0 10737.0 1499425444574149862 1.864038023E9 NULL 8927.0 10737 -804411754 +-804959350 v2wRf43gpDUt1lfieq true 1969-12-31 16:00:08.659 -8072.0 NULL 1864027286 -8072 true -804967422 NULL -1864027286 NULL NULL 1.0 8072.0 1500481238949076692 1.864035358E9 NULL 686.0 8072 -804975494 +-87388872 veoqj217BlDBBVkN0ei3c true 1969-12-31 16:00:03.492 10039.0 NULL 1864027286 10039 false -87378833 NULL -1864027286 NULL NULL 1.0 -10039.0 162876528930837238 1.864017247E9 NULL 5844.0 -10039 -87368794 +-894394703 tFtQ26aDMi1tJ026luPcu true 1969-12-31 15:59:56.928 -3178.0 NULL 1864027286 -3178 true -894397881 NULL -1864027286 NULL NULL 1.0 3178.0 1667182054724580966 1.864030464E9 NULL 3166.0 3178 -894401059 +-933664265 ue8IUf0GlY18RT325P2tu true 1969-12-31 16:00:02.456 13750.0 NULL 1864027286 13750 false -933650515 NULL -1864027286 NULL NULL 1.0 -13750.0 1740350035547952290 1.864013536E9 NULL 8536.0 -13750 
-933636765 +-947255611 vgKx505VdPsHO true 1969-12-31 15:59:46.062 13661.0 NULL 1864027286 13661 true -947241950 NULL -1864027286 NULL NULL 1.0 -13661.0 1765684841243847700 1.864013625E9 NULL 11158.0 -13661 -947228289 +1030560824 tmS75um6Mvyb6N1oiKP7 true 1969-12-31 15:59:53.233 -11073.0 NULL 1864027286 -11073 false 1030549751 NULL -1864027286 NULL NULL 1.0 11073.0 -1920972855444505786 1.864038359E9 NULL 9539.0 11073 1030538678 +108023602 veIw1kh7 true 1969-12-31 16:00:14.188 9239.0 NULL 1864027286 9239 true 108032841 NULL -1864027286 NULL NULL 1.0 -9239.0 -201376163408099526 1.864018047E9 NULL 3602.0 -9239 108042080 +136715714 y2Q3YW true 1969-12-31 15:59:50.737 11813.0 NULL 1864027286 11813 false 136727527 NULL -1864027286 NULL NULL 1.0 -11813.0 -254863841075301722 1.864015473E9 NULL 6764.0 -11813 136739340 +194353234 vtad71tYi1fs1e0tcJg0 true 1969-12-31 15:59:55.372 2960.0 NULL 1864027286 2960 true 194356194 NULL -1864027286 NULL NULL 1.0 -2960.0 -362285248819109484 1.864024326E9 NULL 2806.0 -2960 194359154 +200690208 wfT8d53abPxBj0L true 1969-12-31 16:00:15.522 -12052.0 NULL 1864027286 -12052 true 200678156 NULL -1864027286 NULL NULL 1.0 12052.0 -374069558488164616 1.864039338E9 NULL 4706.0 12052 200666104 +2101183 x7By66525 true 1969-12-31 16:00:05.831 -8915.0 NULL 1864027286 -8915 false 2092268 NULL -1864027286 NULL NULL 1.0 8915.0 -3900044641624648 1.864036201E9 NULL 7766.0 8915 2083353 +223484391 tca24E6L true 1969-12-31 16:00:02.505 -12721.0 NULL 1864027286 -12721 false 223471670 NULL -1864027286 NULL NULL 1.0 12721.0 -416557290527987620 1.864040007E9 NULL 6435.0 12721 223458949 +236934374 wiBqE2A1x8T8gcT4 true 1969-12-31 16:00:11.324 -15101.0 NULL 1864027286 -15101 false 236919273 NULL -1864027286 NULL NULL 1.0 15101.0 -441623989451283078 1.864042387E9 NULL 5149.0 15101 236904172 +245429195 vXc7m82uAg2g24 true 1969-12-31 15:59:57.185 -16001.0 NULL 1864027286 -16001 false 245413194 NULL -1864027286 NULL NULL 1.0 16001.0 -457456889960411484 1.864043287E9 NULL 6792.0 16001 245397193 +247204221 wblxBWSlwWlX7E true 1969-12-31 15:59:54.186 4502.0 NULL 1864027286 4502 true 247208723 NULL -1864027286 NULL NULL 1.0 -4502.0 -460803805009215778 1.864022784E9 NULL 1198.0 -4502 247213225 +252479879 tdUWi true 1969-12-31 16:00:01.806 -877.0 NULL 1864027286 -877 false 252479002 NULL -1864027286 NULL NULL 1.0 877.0 -470627748870048572 1.864028163E9 NULL 620.0 877 252478125 +304132102 vxAjxUq0k true 1969-12-31 16:00:03.466 -12962.0 NULL 1864027286 -12962 true 304119140 NULL -1864027286 NULL NULL 1.0 12962.0 -566886375154854040 1.864040248E9 NULL 952.0 12962 304106178 +308450217 t7i26BC11U1YTY8I0p true 1969-12-31 15:59:46.402 1017.0 NULL 1864027286 1017 true 308451234 NULL -1864027286 NULL NULL 1.0 -1017.0 -574961516576370924 1.864026269E9 NULL 530.0 -1017 308452251 +319983133 t78m7 true 1969-12-31 16:00:09.36 14512.0 NULL 1864027286 14512 true 319997645 NULL -1864027286 NULL NULL 1.0 -14512.0 -596484341735741470 1.864012774E9 NULL 4422.0 -14512 320012157 +336043289 xow6f03825H0h8mFjVr true 1969-12-31 15:59:51.587 -97.0 NULL 1864027286 -97 true 336043192 NULL -1864027286 NULL NULL 1.0 97.0 -626393679162536912 1.864027383E9 NULL 14.0 97 336043095 +336056067 tJ7bf true 1969-12-31 15:59:50.481 16124.0 NULL 1864027286 16124 false 336072191 NULL -1864027286 NULL NULL 1.0 -16124.0 -626447734089803626 1.864011162E9 NULL 12266.0 -16124 336088315 +396908469 uGD31tQ70Py2E0T true 1969-12-31 15:59:50.224 16084.0 NULL 1864027286 16084 false 396924553 NULL -1864027286 NULL NULL 1.0 -16084.0 -739878197275353158 
1.864011202E9 NULL 4274.0 -16084 396940637 +421764768 whw6kHIbH true 1969-12-31 16:00:06.463 5142.0 NULL 1864027286 5142 true 421769910 NULL -1864027286 NULL NULL 1.0 -5142.0 -786190620653764260 1.864022144E9 NULL 866.0 -5142 421775052 +426284338 u6ELlhG3 true 1969-12-31 16:00:00.64 -15070.0 NULL 1864027286 -15070 true 426269268 NULL -1864027286 NULL NULL 1.0 15070.0 -794577546735246648 1.864042356E9 NULL 3916.0 15070 426254198 +434741484 uxI8i true 1969-12-31 16:00:12.505 8120.0 NULL 1864027286 8120 false 434749604 NULL -1864027286 NULL NULL 1.0 -8120.0 -810385124433694744 1.864019166E9 NULL 86.0 -8120 434757724 +460817498 v3A1iI77YBRwl3I16 true 1969-12-31 16:00:08.026 7391.0 NULL 1864027286 7391 true 460824889 NULL -1864027286 NULL NULL 1.0 -7391.0 -858990167163921254 1.864019895E9 NULL 2304.0 -7391 460832280 +466063930 w6OUE6V3UjfE2 true 1969-12-31 15:59:56.958 14276.0 NULL 1864027286 14276 true 466078206 NULL -1864027286 NULL NULL 1.0 -14276.0 -868782493393928916 1.86401301E9 NULL 9966.0 -14276 466092482 +526337887 t0346137k7Lk0O true 1969-12-31 15:59:51.609 15044.0 NULL 1864027286 15044 false 526352931 NULL -1864027286 NULL NULL 1.0 -15044.0 -981136225450075266 1.864012242E9 NULL 466.0 -15044 526367975 +54908166 wLIR3B37 true 1969-12-31 16:00:05.971 8499.0 NULL 1864027286 8499 true 54916665 NULL -1864027286 NULL NULL 1.0 -8499.0 -102366162016121190 1.864018787E9 NULL 1109.0 -8499 54925164 +573439687 vALXyM54AgSH4e0O4IN true 1969-12-31 16:00:10.069 -150.0 NULL 1864027286 -150 false 573439537 NULL -1864027286 NULL NULL 1.0 150.0 -1068906943839206582 1.864027436E9 NULL 86.0 150 573439387 +573476034 x1832l1R2m3V true 1969-12-31 15:59:49.722 -5070.0 NULL 1864027286 -5070 false 573470964 NULL -1864027286 NULL NULL 1.0 5070.0 -1068965524624723704 1.864032356E9 NULL 1226.0 5070 573465894 +58198060 t7Sx50XeM true 1969-12-31 16:00:07.889 7557.0 NULL 1864027286 7557 true 58205617 NULL -1864027286 NULL NULL 1.0 -7557.0 -108496858286465462 1.864019729E9 NULL 2552.0 -7557 58213174 +605953955 x5vy367f6d81FfL8AI8XJ true 1969-12-31 16:00:01.206 11683.0 NULL 1864027286 11683 false 605965638 NULL -1864027286 NULL NULL 1.0 -11683.0 -1129536483610398468 1.864015603E9 NULL 4636.0 -11683 605977321 +732924624 yxN0212hM17E8J8bJj8D7b true 1969-12-31 15:59:46.461 -6751.0 NULL 1864027286 -6751 false 732917873 NULL -1864027286 NULL NULL 1.0 6751.0 -1366178913669082678 1.864034037E9 NULL 1925.0 6751 732911122 +741306115 y1uSBY0 true 1969-12-31 15:59:56.456 -16032.0 NULL 1864027286 -16032 false 741290083 NULL -1864027286 NULL NULL 1.0 16032.0 -1381784941553204738 1.864043318E9 NULL 2678.0 16032 741274051 +746145173 wEe2THv60F6 true 1969-12-31 16:00:03.372 -5589.0 NULL 1864027286 -5589 true 746139584 NULL -1864027286 NULL NULL 1.0 5589.0 -1390824543740689024 1.864032875E9 NULL 773.0 5589 746133995 +773036466 xnk564ke0a7kay3aE6IC true 1969-12-31 16:00:12.369 -12066.0 NULL 1864027286 -12066 true 773024400 NULL -1864027286 NULL NULL 1.0 12066.0 -1440938574343778400 1.864039352E9 NULL 11276.0 12066 773012334 +773348268 vwb48kytjp0Q2YEb true 1969-12-31 15:59:44.909 12581.0 NULL 1864027286 12581 false 773360849 NULL -1864027286 NULL NULL 1.0 -12581.0 -1441565724460125814 1.864014705E9 NULL 1164.0 -12581 773373430 +855072260 y7S47c5V true 1969-12-31 16:00:08.381 -11734.0 NULL 1864027286 -11734 false 855060526 NULL -1864027286 NULL NULL 1.0 11734.0 -1593856151645512436 1.86403902E9 NULL 10982.0 11734 855048792 +86487282 vH8AHgcWaDm true 1969-12-31 16:00:10.869 13309.0 NULL 1864027286 13309 false 86500591 NULL -1864027286 
NULL NULL 1.0 -13309.0 -161239461879126026 1.864013977E9 NULL 8673.0 -13309 86513900
+872474570 wT50ouOe760m3AyJ7x4p83U6 true 1969-12-31 15:59:46.57 -2856.0 NULL 1864027286 -2856 true 872471714 NULL -1864027286 NULL NULL 1.0 2856.0 -1626311081159188204 1.864030142E9 NULL 1766.0 2856 872468858
+936765787 wP0re2S74Y308jgOTc6 true 1969-12-31 15:59:50.924 -10311.0 NULL 1864027286 -10311 false 936755476 NULL -1864027286 NULL NULL 1.0 10311.0 -1746137767573918136 1.864037597E9 NULL 4706.0 10311 936745165
+95424126 txKwQS70d20 true 1969-12-31 16:00:16.343 9766.0 NULL 1864027286 9766 false 95433892 NULL -1864027286 NULL NULL 1.0 -9766.0 -177891378697177112 1.86401752E9 NULL 632.0 -9766 95443658
+97246854 vvK378scVFuBh8Q3HXUJsP true 1969-12-31 16:00:01.629 -9554.0 NULL 1864027286 -9554 true 97237300 NULL -1864027286 NULL NULL 1.0 9554.0 -181252980416967800 1.86403684E9 NULL 3670.0 9554 97227746
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT
 ctimestamp1,
 cstring2,
@@ -2110,7 +2110,7 @@ POSTHOOK: Input: default@alltypesorc
 1969-12-31 16:00:05.83 06Tj8f5xNhpaiE71AWqJ7b5 15601.0 -49.0 226841234 15601 63558.76548052676 -15858 315168.0 -15601.0 158740.17500000002 -6432.0 49.0 NULL -15601.0 -2.43391201E8
 1969-12-31 16:00:05.997 12AEw 15601.0 -64.0 1421812187 15601 398378.30961053516 -15858 411648.0 -15601.0 158740.17500000002 -6432.0 64.0 NULL -15601.0 -2.43391201E8
 1969-12-31 16:00:07.499 14MDiWrX 15601.0 -33.0 42147119 15601 11809.223592042588 -15858 212256.0 -15601.0 158740.17500000002 -6432.0 33.0 NULL -15601.0 -2.43391201E8
-1969-12-31 16:00:08.451 rVWAj4N1MCg8Scyp7wj2C NULL -51.0 -89010 NULL -24.93975903614458 NULL 328032.0 NULL NULL -6432.0 51.0 NULL NULL NULL
+1969-12-31 16:00:08.488 16jmamsEtKc51n 15601.0 1.0 -832606494 15601 -233288.45446903896 -15858 -6432.0 -15601.0 158740.17500000002 -6432.0 -1.0 NULL -15601.0 -2.43391201E8
 1969-12-31 16:00:09.123 064GHv0UW8 15601.0 -14.0 1007181336 15601 282202.67189688986 -15858 90048.0 -15601.0 158740.17500000002 -6432.0 14.0 NULL -15601.0 -2.43391201E8
 1969-12-31 16:00:11.928 0UugmY0R5hI 15601.0 -32.0 1701987317 15601 476880.7276548053 -15858 205824.0 -15601.0 158740.17500000002 -6432.0 32.0 NULL -15601.0 -2.43391201E8
 1969-12-31 16:00:12.853 12gbSP4px465TdXmV5F2apmC 15601.0 28.0 -1556827241 15601 -436208.2490893808 -15858 -180096.0 -15601.0 158740.17500000002 -6432.0 -28.0 NULL -15601.0 -2.43391201E8
diff --git ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out
index d142f09c34..5f50131405 100644
--- ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out
+++ ql/src/test/results/clientpositive/llap/vectorized_bucketmapjoin1.q.out
@@ -150,6 +150,9 @@ STAGE PLANS:
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 Execution mode: llap
+ MergeJoin Vectorization:
+ enabled: false
+ enableConditionsNotMet: Vectorizing MergeJoin Supported IS false

 Stage: Stage-0
 Fetch Operator
@@ -227,6 +230,9 @@ STAGE PLANS:
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 Execution mode: llap
+ MergeJoin Vectorization:
+ enabled: false
+ enableConditionsNotMet: Vectorizing MergeJoin Supported IS false

 Stage: Stage-0
 Fetch Operator
@@ -304,6 +310,9 @@ STAGE PLANS:
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
Execution mode: llap + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/llap/vectorized_case.q.out ql/src/test/results/clientpositive/llap/vectorized_case.q.out index 2591c288b3..7e78396bde 100644 --- ql/src/test/results/clientpositive/llap/vectorized_case.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_case.q.out @@ -317,10 +317,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1] + valueColumns: 0:bigint, 1:bigint Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint), _col1 (type: bigint) Execution mode: vectorized, llap @@ -461,10 +460,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1] + valueColumns: 0:bigint, 1:bigint Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint), _col1 (type: bigint) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vectorized_distinct_gby.q.out ql/src/test/results/clientpositive/llap/vectorized_distinct_gby.q.out index 7e78360791..826fc5fb7b 100644 --- ql/src/test/results/clientpositive/llap/vectorized_distinct_gby.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_distinct_gby.q.out @@ -82,10 +82,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1] + valueColumns: 0:bigint, 1:bigint Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint), _col1 (type: bigint) Execution mode: vectorized, llap @@ -216,10 +215,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 6105 Data size: 18232 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -291,10 +289,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: 
hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                      valueColumnNums: [0, 1, 2, 3]
+                      valueColumns: 0:bigint, 1:bigint, 2:double, 3:double
                   Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: double), _col3 (type: double)
         Reducer 3
diff --git ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out
index da71d06ed6..68ffb3cc8b 100644
--- ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out
+++ ql/src/test/results/clientpositive/llap/vectorized_dynamic_partition_pruning.q.out
@@ -328,6 +328,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -470,6 +473,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -684,6 +690,9 @@ STAGE PLANS:
                   sort order: +
                   Map-reduce partition columns: _col1 (type: string)
                   Statistics: Num rows: 2200 Data size: 809600 Basic stats: COMPLETE Column stats: NONE
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: llap
             Reduce Operator Tree:
@@ -703,6 +712,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 4
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -879,6 +891,9 @@ STAGE PLANS:
                   sort order: +
                   Map-reduce partition columns: _col1 (type: string)
                   Statistics: Num rows: 2200 Data size: 809600 Basic stats: COMPLETE Column stats: NONE
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: llap
             Reduce Operator Tree:
@@ -898,6 +913,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 4
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -1084,6 +1102,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -1226,6 +1247,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -1394,6 +1418,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -1536,6 +1563,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -1702,6 +1732,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -1859,6 +1892,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -2001,6 +2037,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -2143,6 +2182,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -2313,6 +2355,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -2447,6 +2492,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -2618,6 +2666,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -2790,6 +2841,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -2947,6 +3001,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -3084,6 +3141,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -3221,6 +3281,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -3403,6 +3466,9 @@ STAGE PLANS:
                   sort order: +
                   Map-reduce partition columns: _col1 (type: string)
                   Statistics: Num rows: 1100 Data size: 404800 Basic stats: COMPLETE Column stats: NONE
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: llap
             Reduce Operator Tree:
@@ -3422,6 +3488,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 4
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -3595,6 +3664,9 @@ STAGE PLANS:
                   sort order: +
                   Map-reduce partition columns: _col1 (type: string)
                   Statistics: Num rows: 1 Data size: 202 Basic stats: COMPLETE Column stats: NONE
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: llap
             Reduce Operator Tree:
@@ -3614,6 +3686,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 4
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -3783,6 +3858,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -4042,6 +4120,9 @@ STAGE PLANS:
                   sort order: +
                   Map-reduce partition columns: _col0 (type: string)
                   Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: COMPLETE
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -4404,6 +4485,9 @@ STAGE PLANS:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 6
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -5612,6 +5696,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
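All of the hunks above are the same mechanical change: EXPLAIN VECTORIZATION now prints a MergeJoin Vectorization section for reduce-side merge joins (which are not vectorizable) instead of staying silent, so every affected golden file grows the same three lines per reducer. As a rough sketch of how output like this is produced when regenerating a golden file (the query and table names are only illustrative, not taken from this test):

    -- Vectorization must be enabled for the annotations to appear.
    set hive.vectorized.execution.enabled=true;
    -- OPERATOR level adds per-operator vectorization detail,
    -- including the MergeJoin Vectorization block seen above.
    EXPLAIN VECTORIZATION OPERATOR
    SELECT count(*)
    FROM srcpart JOIN srcpart_date ON (srcpart.ds = srcpart_date.ds);
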
diff --git ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction.q.out ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction.q.out
index 2aa9af2e0d..c22a1277f7 100644
--- ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction.q.out
+++ ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction.q.out
@@ -180,6 +180,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -414,6 +417,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -648,6 +654,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -956,6 +965,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -1248,6 +1260,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -1511,6 +1526,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
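The same three-line annotation repeats through the semijoin-reduction file, and the pattern is easy to read: the vertex hosting the merge join stays in row mode (Execution mode: llap) while its sibling reducers remain vectorized, llap. When only the vectorization verdict is of interest, the ONLY modifier of EXPLAIN VECTORIZATION trims away the rest of the plan (a minimal sketch; t1 and t2 are hypothetical tables):

    set hive.vectorized.execution.enabled=true;
    -- ONLY suppresses the non-vectorization plan sections;
    -- SUMMARY keeps just the per-vertex verdicts.
    EXPLAIN VECTORIZATION ONLY SUMMARY
    SELECT count(*) FROM t1 JOIN t2 ON (t1.key = t2.key);
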
diff --git ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction2.q.out ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction2.q.out
index e4169543b5..946f37e612 100644
--- ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction2.q.out
+++ ql/src/test/results/clientpositive/llap/vectorized_dynamic_semijoin_reduction2.q.out
@@ -279,10 +279,9 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: decimal(10,1))
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkMultiKeyOperator
-                          keyColumnNums: [1]
+                          keyColumns: 1:decimal(10,1)
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: []
                       Statistics: Num rows: 100 Data size: 11200 Basic stats: COMPLETE Column stats: COMPLETE
             Execution mode: vectorized, llap
             LLAP IO: all inputs
@@ -331,10 +330,9 @@ STAGE PLANS:
                       Map-reduce partition columns: _col0 (type: decimal(10,1))
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkMultiKeyOperator
-                          keyColumnNums: [1]
+                          keyColumns: 1:decimal(10,1)
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: []
                       Statistics: Num rows: 20 Data size: 2240 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: _col0 (type: decimal(10,1))
@@ -360,10 +358,9 @@ STAGE PLANS:
                       sort order: 
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkEmptyKeyOperator
-                          keyColumnNums: []
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: [0, 1, 2]
+                          valueColumns: 0:decimal(10,1), 1:decimal(10,1), 2:binary
                       Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: decimal(10,1)), _col1 (type: decimal(10,1)), _col2 (type: binary)
             Execution mode: vectorized, llap
@@ -402,6 +399,9 @@ STAGE PLANS:
                   sort order: 
                   Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: bigint)
+                MergeJoin Vectorization:
+                    enabled: false
+                    enableConditionsNotMet: Vectorizing MergeJoin Supported IS false
         Reducer 3
             Execution mode: vectorized, llap
             Reduce Vectorization:
@@ -472,10 +472,9 @@ STAGE PLANS:
                       sort order: 
                       Reduce Sink Vectorization:
                           className: VectorReduceSinkEmptyKeyOperator
-                          keyColumnNums: []
                           native: true
                           nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-                          valueColumnNums: [0, 1, 2]
+                          valueColumns: 0:decimal(10,1), 1:decimal(10,1), 2:binary
                       Statistics: Num rows: 1 Data size: 336 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: decimal(10,1)), _col1 (type: decimal(10,1)), _col2 (type: binary)
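The second kind of churn, visible in the file above, is purely notational: ReduceSink annotations switch from bare column indices (keyColumnNums: [1], valueColumnNums: [0, 1, 2]) to index:type pairs (keyColumns: 1:decimal(10,1), valueColumns: 0:decimal(10,1), ...), and empty lists are dropped. The decimal key also explains the operator choice: a key that is neither a single long nor a single string falls back to the multi-key sink. A query of this shape would exercise it (sketch; table and column names are hypothetical):

    set hive.vectorized.execution.reducesink.new.enabled=true;
    -- A decimal grouping key cannot use the specialized Long/String
    -- reduce sinks, so EXPLAIN shows VectorReduceSinkMultiKeyOperator
    -- for this shuffle.
    EXPLAIN VECTORIZATION OPERATOR
    SELECT dec_col, count(*) FROM t GROUP BY dec_col;
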
diff --git ql/src/test/results/clientpositive/llap/vectorized_join46.q.out ql/src/test/results/clientpositive/llap/vectorized_join46.q.out
index 6b25672237..2f5eb26aaa 100644
--- ql/src/test/results/clientpositive/llap/vectorized_join46.q.out
+++ ql/src/test/results/clientpositive/llap/vectorized_join46.q.out
@@ -19,6 +19,7 @@ POSTHOOK: Output: default@test1_n14
 POSTHOOK: Lineage: test1_n14.col_1 SCRIPT []
 POSTHOOK: Lineage: test1_n14.key SCRIPT []
 POSTHOOK: Lineage: test1_n14.value SCRIPT []
+col1 col2 col3
 PREHOOK: query: CREATE TABLE test2_n9 (key INT, value INT, col_2 STRING)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
@@ -40,16 +41,22 @@ POSTHOOK: Output: default@test2_n9
 POSTHOOK: Lineage: test2_n9.col_2 SCRIPT []
 POSTHOOK: Lineage: test2_n9.key SCRIPT []
 POSTHOOK: Lineage: test2_n9.value SCRIPT []
+col1 col2 col3
-PREHOOK: query: EXPLAIN
+PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value)
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN
+POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value)
 POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
   Stage-0 depends on stages: Stage-1
@@ -67,9 +74,14 @@ STAGE PLANS:
                 TableScan
                   alias: test1_n14
                   Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
                   Select Operator
                     expressions: key (type: int), value (type: int), col_1 (type: string)
                     outputColumnNames: _col0, _col1, _col2
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
                     Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
                     Map Join Operator
                       condition map:
@@ -77,12 +89,19 @@ STAGE PLANS:
                       keys:
                         0 _col1 (type: int)
                         1 _col1 (type: int)
+                      Map Join Vectorization:
+                          className: VectorMapJoinOuterLongOperator
+                          native: true
+                          nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                       input vertices:
                         1 Map 2
                       Statistics: Num rows: 8 Data size: 859 Basic stats: COMPLETE Column stats: COMPLETE
                       File Output Operator
                         compressed: false
+                        File Sink Vectorization:
+                            className: VectorFileSinkOperator
+                            native: false
                         Statistics: Num rows: 8 Data size: 859 Basic stats: COMPLETE Column stats: COMPLETE
                         table:
                             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -90,23 +109,50 @@ STAGE PLANS:
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
             Execution mode: vectorized, llap
             LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
         Map 2
             Map Operator Tree:
                 TableScan
                   alias: test2_n9
                   Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
                   Select Operator
                     expressions: key (type: int), value (type: int), col_2 (type: string)
                     outputColumnNames: _col0, _col1, _col2
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
                     Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
                     Reduce Output Operator
                       key expressions: _col1 (type: int)
                       sort order: +
                       Map-reduce partition columns: _col1 (type: int)
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkLongOperator
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: int), _col2 (type: string)
             Execution mode: vectorized, llap
             LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true

     Stage: Stage-0
       Fetch Operator
@@ -128,6 +174,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n14
 POSTHOOK: Input: default@test2_n9
 #### A masked pattern was here ####
+test1_n14.key test1_n14.value test1_n14.col_1 test2_n9.key test2_n9.value test2_n9.col_2
 100 1 Bob NULL NULL NULL
 101 2 Car 102 2 Del
 101 2 Car 103 2 Ema
@@ -136,20 +183,25 @@ POSTHOOK: Input: default@test2_n9
 99 2 Mat 102 2 Del
 99 2 Mat 103 2 Ema
 NULL NULL None NULL NULL NULL
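This first join46 query is the baseline case: a single equi-condition on int columns, so the outer map join vectorizes natively as VectorMapJoinOuterLongOperator. Each entry under nativeConditionsMet corresponds to a setting or plan property; the two configurable ones can be flipped to watch the fallback happen (sketch, using only settings that appear in the conditions above):

    -- Both must hold for the native vectorized outer map join:
    set hive.mapjoin.optimized.hashtable=true;
    set hive.vectorized.execution.mapjoin.native.enabled=true;
    -- Disabling the second should demote the join to the
    -- non-native (row-mode pass-through) map join path.
    set hive.vectorized.execution.mapjoin.native.enabled=false;
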
-PREHOOK: query: EXPLAIN
+PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
 AND test1_n14.key between 100 and 102
 AND test2_n9.key between 100 and 102)
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN
+POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
 AND test1_n14.key between 100 and 102
 AND test2_n9.key between 100 and 102)
 POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
   Stage-0 depends on stages: Stage-1
@@ -167,9 +219,14 @@ STAGE PLANS:
                 TableScan
                   alias: test1_n14
                   Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
                   Select Operator
                     expressions: key (type: int), value (type: int), col_1 (type: string)
                     outputColumnNames: _col0, _col1, _col2
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
                     Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
                     Map Join Operator
                       condition map:
@@ -180,12 +237,19 @@ STAGE PLANS:
                       keys:
                         0 _col1 (type: int)
                         1 _col1 (type: int)
+                      Map Join Vectorization:
+                          className: VectorMapJoinOuterLongOperator
+                          native: true
+                          nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                       input vertices:
                         1 Map 2
                       Statistics: Num rows: 6 Data size: 952 Basic stats: COMPLETE Column stats: COMPLETE
                       File Output Operator
                         compressed: false
+                        File Sink Vectorization:
+                            className: VectorFileSinkOperator
+                            native: false
                         Statistics: Num rows: 6 Data size: 952 Basic stats: COMPLETE Column stats: COMPLETE
                         table:
                             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -193,27 +257,57 @@ STAGE PLANS:
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
             Execution mode: vectorized, llap
             LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
         Map 2
             Map Operator Tree:
                 TableScan
                   alias: test2_n9
                   filterExpr: key BETWEEN 100 AND 102 (type: boolean)
                   Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
                   Filter Operator
+                    Filter Vectorization:
+                        className: VectorFilterOperator
+                        native: true
                     predicate: key BETWEEN 100 AND 102 (type: boolean)
                     Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: key (type: int), value (type: int), col_2 (type: string)
                       outputColumnNames: _col0, _col1, _col2
+                      Select Vectorization:
+                          className: VectorSelectOperator
+                          native: true
                       Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         key expressions: _col1 (type: int)
                         sort order: +
                         Map-reduce partition columns: _col1 (type: int)
+                        Reduce Sink Vectorization:
+                            className: VectorReduceSinkLongOperator
+                            native: true
+                            nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                         Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col0 (type: int), _col2 (type: string)
             Execution mode: vectorized, llap
             LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true

     Stage: Stage-0
       Fetch Operator
@@ -239,6 +333,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n14
 POSTHOOK: Input: default@test2_n9
 #### A masked pattern was here ####
+test1_n14.key test1_n14.value test1_n14.col_1 test2_n9.key test2_n9.value test2_n9.col_2
 100 1 Bob NULL NULL NULL
 101 2 Car 102 2 Del
 98 NULL None NULL NULL NULL
@@ -246,18 +341,23 @@ POSTHOOK: Input: default@test2_n9
 99 2 Mat NULL NULL NULL
 NULL NULL None NULL NULL NULL
Warning: Map Join MAPJOIN[11][bigTable=?] in task 'Map 1' is a cross product
-PREHOOK: query: EXPLAIN
+PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.key between 100 and 102
 AND test2_n9.key between 100 and 102)
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN
+POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.key between 100 and 102
 AND test2_n9.key between 100 and 102)
 POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
   Stage-0 depends on stages: Stage-1
@@ -275,9 +375,14 @@ STAGE PLANS:
                 TableScan
                   alias: test1_n14
                   Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
                   Select Operator
                     expressions: key (type: int), value (type: int), col_1 (type: string)
                     outputColumnNames: _col0, _col1, _col2
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
                     Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
                     Map Join Operator
                       condition map:
@@ -288,12 +393,20 @@ STAGE PLANS:
                       keys:
                         0 
                         1 
+                      Map Join Vectorization:
+                          className: VectorMapJoinOuterFilteredOperator
+                          native: false
+                          nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
+                          nativeConditionsNotMet: Outer Join has keys IS false
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                       input vertices:
                         1 Map 2
                       Statistics: Num rows: 6 Data size: 1142 Basic stats: COMPLETE Column stats: COMPLETE
                       File Output Operator
                         compressed: false
+                        File Sink Vectorization:
+                            className: VectorFileSinkOperator
+                            native: false
                         Statistics: Num rows: 6 Data size: 1142 Basic stats: COMPLETE Column stats: COMPLETE
                         table:
                             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -301,25 +414,55 @@ STAGE PLANS:
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
             Execution mode: vectorized, llap
             LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
         Map 2
            Map Operator Tree:
                TableScan
                  alias: test2_n9
                  filterExpr: key BETWEEN 100 AND 102 (type: boolean)
                  Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
                  Filter Operator
+                    Filter Vectorization:
+                        className: VectorFilterOperator
+                        native: true
                    predicate: key BETWEEN 100 AND 102 (type: boolean)
                    Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: COMPLETE
                    Select Operator
                      expressions: key (type: int), value (type: int), col_2 (type: string)
                      outputColumnNames: _col0, _col1, _col2
+                      Select Vectorization:
+                          className: VectorSelectOperator
+                          native: true
                      Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: COMPLETE
                      Reduce Output Operator
                        sort order: 
+                        Reduce Sink Vectorization:
+                            className: VectorReduceSinkEmptyKeyOperator
+                            native: true
+                            nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                        Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: COMPLETE
                        value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
            Execution mode: vectorized, llap
            LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true

     Stage: Stage-0
       Fetch Operator
@@ -344,22 +487,28 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n14
 POSTHOOK: Input: default@test2_n9
 #### A masked pattern was here ####
+test1_n14.key test1_n14.value test1_n14.col_1 test2_n9.key test2_n9.value test2_n9.col_2
 100 1 Bob 102 2 Del
 101 2 Car 102 2 Del
 98 NULL None NULL NULL NULL
 99 0 Alice NULL NULL NULL
 99 2 Mat NULL NULL NULL
 NULL NULL None NULL NULL NULL
-PREHOOK: query: EXPLAIN
+PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 RIGHT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value AND true)
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN
+POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 RIGHT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value AND true)
 POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
   Stage-0 depends on stages: Stage-1
@@ -377,26 +526,49 @@ STAGE PLANS:
                 TableScan
                   alias: test1_n14
                   Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
                   Select Operator
                     expressions: key (type: int), value (type: int), col_1 (type: string)
                     outputColumnNames: _col0, _col1, _col2
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
                     Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
                     Reduce Output Operator
                       key expressions: _col1 (type: int)
                       sort order: +
                       Map-reduce partition columns: _col1 (type: int)
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkLongOperator
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: int), _col2 (type: string)
             Execution mode: vectorized, llap
             LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
        Map 2
            Map Operator Tree:
                TableScan
                  alias: test2_n9
                  Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
                  Select Operator
                    expressions: key (type: int), value (type: int), col_2 (type: string)
                    outputColumnNames: _col0, _col1, _col2
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
                    Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
                    Map Join Operator
                      condition map:
@@ -404,12 +576,19 @@ STAGE PLANS:
                      keys:
                        0 _col1 (type: int)
                        1 _col1 (type: int)
+                      Map Join Vectorization:
+                          className: VectorMapJoinOuterLongOperator
+                          native: true
+                          nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true
                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                      input vertices:
                        0 Map 1
                      Statistics: Num rows: 8 Data size: 856 Basic stats: COMPLETE Column stats: COMPLETE
                      File Output Operator
                        compressed: false
+                        File Sink Vectorization:
+                            className: VectorFileSinkOperator
+                            native: false
                        Statistics: Num rows: 8 Data size: 856 Basic stats: COMPLETE Column stats: COMPLETE
                        table:
                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -417,6 +596,15 @@ STAGE PLANS:
                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
            Execution mode: vectorized, llap
            LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true

     Stage: Stage-0
       Fetch Operator
@@ -438,6 +626,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n14
 POSTHOOK: Input: default@test2_n9
 #### A masked pattern was here ####
+test1_n14.key test1_n14.value test1_n14.col_1 test2_n9.key test2_n9.value test2_n9.col_2
 101 2 Car 102 2 Del
 101 2 Car 103 2 Ema
 99 2 Mat 102 2 Del
@@ -445,16 +634,21 @@ POSTHOOK: Input: default@test2_n9
 NULL NULL NULL 104 3 Fli
 NULL NULL NULL 105 NULL None
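The three queries above walk the spectrum: BETWEEN predicates AND-ed onto the equality stay vectorizable (they become filters or residuals), but an ON clause with no equality at all leaves the join keyless. That is what nativeConditionsNotMet: Outer Join has keys IS false marks, and it is also why the keyless variants draw the cross-product warning: with empty keys the small table is broadcast in full and the predicate is evaluated as a residual. Any keyless outer join reproduces it (illustrative; t1 and t2 are hypothetical):

    -- No equi-condition: empty join keys, hence the cross product
    -- warning and the non-native VectorMapJoinOuterFilteredOperator
    -- in the plan.
    EXPLAIN VECTORIZATION OPERATOR
    SELECT * FROM t1 LEFT OUTER JOIN t2
    ON (t1.key BETWEEN 100 AND 102);
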
Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
-PREHOOK: query: EXPLAIN
+PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.key between 100 and 102)
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN
+POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.key between 100 and 102)
 POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
   Stage-0 depends on stages: Stage-1
@@ -472,9 +666,14 @@ STAGE PLANS:
                 TableScan
                   alias: test1_n14
                   Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
                   Select Operator
                     expressions: key (type: int), value (type: int), col_1 (type: string)
                     outputColumnNames: _col0, _col1, _col2
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
                     Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
                     Map Join Operator
                       condition map:
@@ -485,12 +684,20 @@ STAGE PLANS:
                       keys:
                         0 
                         1 
+                      Map Join Vectorization:
+                          className: VectorMapJoinOuterFilteredOperator
+                          native: false
+                          nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
+                          nativeConditionsNotMet: Outer Join has keys IS false
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                       input vertices:
                         1 Map 2
                       Statistics: Num rows: 24 Data size: 4580 Basic stats: COMPLETE Column stats: COMPLETE
                       File Output Operator
                         compressed: false
+                        File Sink Vectorization:
+                            className: VectorFileSinkOperator
+                            native: false
                         Statistics: Num rows: 24 Data size: 4580 Basic stats: COMPLETE Column stats: COMPLETE
                         table:
                             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -498,21 +705,48 @@ STAGE PLANS:
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
             Execution mode: vectorized, llap
             LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
        Map 2
            Map Operator Tree:
                TableScan
                  alias: test2_n9
                  Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
                  Select Operator
                    expressions: key (type: int), value (type: int), col_2 (type: string)
                    outputColumnNames: _col0, _col1, _col2
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
                    Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
                    Reduce Output Operator
                      sort order: 
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkEmptyKeyOperator
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                      Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
            Execution mode: vectorized, llap
            LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true

     Stage: Stage-0
       Fetch Operator
@@ -535,6 +769,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n14
 POSTHOOK: Input: default@test2_n9
 #### A masked pattern was here ####
+test1_n14.key test1_n14.value test1_n14.col_1 test2_n9.key test2_n9.value test2_n9.col_2
 100 1 Bob 102 2 Del
 100 1 Bob 103 2 Ema
 100 1 Bob 104 3 Fli
@@ -548,20 +783,25 @@ POSTHOOK: Input: default@test2_n9
 99 2 Mat NULL NULL NULL
 NULL NULL None NULL NULL NULL
Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
-PREHOOK: query: EXPLAIN
+PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
 OR test1_n14.key between 100 and 102
 OR test2_n9.key between 100 and 102)
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN
+POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
 OR test1_n14.key between 100 and 102
 OR test2_n9.key between 100 and 102)
 POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
   Stage-0 depends on stages: Stage-1
@@ -603,21 +843,45 @@ STAGE PLANS:
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
             Execution mode: llap
             LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                notVectorizedReason: MAPJOIN operator: Non-equi joins not supported
+                vectorized: false
        Map 2
            Map Operator Tree:
                TableScan
                  alias: test2_n9
                  Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
                  Select Operator
                    expressions: key (type: int), value (type: int), col_2 (type: string)
                    outputColumnNames: _col0, _col1, _col2
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
                    Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
                    Reduce Output Operator
                      sort order: 
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkEmptyKeyOperator
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                      Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
            Execution mode: vectorized, llap
            LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true

     Stage: Stage-0
       Fetch Operator
@@ -644,6 +908,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n14
 POSTHOOK: Input: default@test2_n9
 #### A masked pattern was here ####
+test1_n14.key test1_n14.value test1_n14.col_1 test2_n9.key test2_n9.value test2_n9.col_2
 100 1 Bob 102 2 Del
 100 1 Bob 103 2 Ema
 100 1 Bob 104 3 Fli
@@ -658,18 +923,23 @@ POSTHOOK: Input: default@test2_n9
 99 2 Mat 103 2 Ema
 NULL NULL None 102 2 Del
Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
-PREHOOK: query: EXPLAIN
+PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
 OR test1_n14.key between 100 and 102)
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN
+POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
 OR test1_n14.key between 100 and 102)
 POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
   Stage-0 depends on stages: Stage-1
@@ -711,21 +981,45 @@ STAGE PLANS:
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
             Execution mode: llap
             LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                notVectorizedReason: MAPJOIN operator: Non-equi joins not supported
+                vectorized: false
        Map 2
            Map Operator Tree:
                TableScan
                  alias: test2_n9
                  Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
                  Select Operator
                    expressions: key (type: int), value (type: int), col_2 (type: string)
                    outputColumnNames: _col0, _col1, _col2
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
                    Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
                    Reduce Output Operator
                      sort order: 
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkEmptyKeyOperator
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                      Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
            Execution mode: vectorized, llap
            LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true

     Stage: Stage-0
       Fetch Operator
@@ -750,6 +1044,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n14
 POSTHOOK: Input: default@test2_n9
 #### A masked pattern was here ####
+test1_n14.key test1_n14.value test1_n14.col_1 test2_n9.key test2_n9.value test2_n9.col_2
 100 1 Bob 102 2 Del
 100 1 Bob 103 2 Ema
 100 1 Bob 104 3 Fli
@@ -764,18 +1059,23 @@ POSTHOOK: Input: default@test2_n9
 99 2 Mat 103 2 Ema
 NULL NULL None NULL NULL NULL
Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
-PREHOOK: query: EXPLAIN
+PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
 OR test2_n9.key between 100 and 102)
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN
+POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
 OR test2_n9.key between 100 and 102)
 POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
   Stage-0 depends on stages: Stage-1
@@ -817,21 +1117,45 @@ STAGE PLANS:
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
             Execution mode: llap
             LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                notVectorizedReason: MAPJOIN operator: Non-equi joins not supported
+                vectorized: false
        Map 2
            Map Operator Tree:
                TableScan
                  alias: test2_n9
                  Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
                  Select Operator
                    expressions: key (type: int), value (type: int), col_2 (type: string)
                    outputColumnNames: _col0, _col1, _col2
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
                    Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
                    Reduce Output Operator
                      sort order: 
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkEmptyKeyOperator
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                      Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
            Execution mode: vectorized, llap
            LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true

     Stage: Stage-0
       Fetch Operator
@@ -856,6 +1180,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n14
 POSTHOOK: Input: default@test2_n9
 #### A masked pattern was here ####
+test1_n14.key test1_n14.value test1_n14.col_1 test2_n9.key test2_n9.value test2_n9.col_2
 100 1 Bob 102 2 Del
 101 2 Car 102 2 Del
 101 2 Car 103 2 Ema
@@ -864,20 +1189,25 @@ POSTHOOK: Input: default@test2_n9
 99 2 Mat 102 2 Del
 99 2 Mat 103 2 Ema
 NULL NULL None 102 2 Del
-PREHOOK: query: EXPLAIN
+PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
 AND (test1_n14.key between 100 and 102
 OR test2_n9.key between 100 and 102))
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN
+POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 LEFT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
 AND (test1_n14.key between 100 and 102
 OR test2_n9.key between 100 and 102))
 POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
   Stage-0 depends on stages: Stage-1
@@ -919,23 +1249,47 @@ STAGE PLANS:
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
             Execution mode: llap
             LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                notVectorizedReason: MAPJOIN operator: Non-equi joins not supported
+                vectorized: false
        Map 2
            Map Operator Tree:
                TableScan
                  alias: test2_n9
                  Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
                  Select Operator
                    expressions: key (type: int), value (type: int), col_2 (type: string)
                    outputColumnNames: _col0, _col1, _col2
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
                    Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
                    Reduce Output Operator
                      key expressions: _col1 (type: int)
                      sort order: +
                      Map-reduce partition columns: _col1 (type: int)
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkLongOperator
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                      Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE
                      value expressions: _col0 (type: int), _col2 (type: string)
            Execution mode: vectorized, llap
            LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true

     Stage: Stage-0
       Fetch Operator
@@ -961,6 +1315,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n14
 POSTHOOK: Input: default@test2_n9
 #### A masked pattern was here ####
+test1_n14.key test1_n14.value test1_n14.col_1 test2_n9.key test2_n9.value test2_n9.col_2
 100 1 Bob NULL NULL NULL
 101 2 Car 102 2 Del
 101 2 Car 103 2 Ema
@@ -969,20 +1324,25 @@ POSTHOOK: Input: default@test2_n9
 99 2 Mat 102 2 Del
 NULL NULL None NULL NULL NULL
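From here on the pattern for OR-ed join conditions repeats: an ON clause that ORs the equality with a range predicate is a genuine non-equi join, so the vertex hosting the MAPJOIN reports notVectorizedReason: MAPJOIN operator: Non-equi joins not supported and runs in row mode, while the other side's map vertex stays fully vectorized. Schematically (hypothetical tables):

    -- The OR disables the equi-join hash lookup entirely, so the
    -- MAPJOIN operator vetoes vectorization for its whole vertex.
    EXPLAIN VECTORIZATION OPERATOR
    SELECT * FROM t1 LEFT OUTER JOIN t2
    ON (t1.value = t2.value OR t1.key BETWEEN 100 AND 102);
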
Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
-PREHOOK: query: EXPLAIN
+PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 RIGHT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
 OR test1_n14.key between 100 and 102
 OR test2_n9.key between 100 and 102)
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN
+POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 RIGHT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
 OR test1_n14.key between 100 and 102
 OR test2_n9.key between 100 and 102)
 POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
   Stage-0 depends on stages: Stage-1
@@ -1000,16 +1360,34 @@ STAGE PLANS:
                 TableScan
                   alias: test1_n14
                   Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
                   Select Operator
                     expressions: key (type: int), value (type: int), col_1 (type: string)
                     outputColumnNames: _col0, _col1, _col2
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
                     Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
                     Reduce Output Operator
                       sort order: 
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkEmptyKeyOperator
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
             Execution mode: vectorized, llap
             LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
        Map 2
            Map Operator Tree:
                TableScan
@@ -1039,6 +1417,12 @@ STAGE PLANS:
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
             Execution mode: llap
             LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                notVectorizedReason: MAPJOIN operator: Non-equi joins not supported
+                vectorized: false

     Stage: Stage-0
       Fetch Operator
@@ -1065,6 +1449,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n14
 POSTHOOK: Input: default@test2_n9
 #### A masked pattern was here ####
+test1_n14.key test1_n14.value test1_n14.col_1 test2_n9.key test2_n9.value test2_n9.col_2
 100 1 Bob 102 2 Del
 100 1 Bob 103 2 Ema
 100 1 Bob 104 3 Fli
@@ -1079,18 +1464,23 @@ POSTHOOK: Input: default@test2_n9
 99 2 Mat 103 2 Ema
 NULL NULL None 102 2 Del
Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
-PREHOOK: query: EXPLAIN
+PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 RIGHT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
 OR test1_n14.key between 100 and 102)
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN
+POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 RIGHT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
 OR test1_n14.key between 100 and 102)
 POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
   Stage-0 depends on stages: Stage-1
@@ -1108,16 +1498,34 @@ STAGE PLANS:
                 TableScan
                   alias: test1_n14
                   Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
                   Select Operator
                     expressions: key (type: int), value (type: int), col_1 (type: string)
                     outputColumnNames: _col0, _col1, _col2
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
                     Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
                     Reduce Output Operator
                       sort order: 
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkEmptyKeyOperator
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
             Execution mode: vectorized, llap
             LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
        Map 2
            Map Operator Tree:
                TableScan
@@ -1147,6 +1555,12 @@ STAGE PLANS:
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
             Execution mode: llap
             LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                notVectorizedReason: MAPJOIN operator: Non-equi joins not supported
+                vectorized: false

     Stage: Stage-0
       Fetch Operator
@@ -1171,6 +1585,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n14
 POSTHOOK: Input: default@test2_n9
 #### A masked pattern was here ####
+test1_n14.key test1_n14.value test1_n14.col_1 test2_n9.key test2_n9.value test2_n9.col_2
 100 1 Bob 102 2 Del
 100 1 Bob 103 2 Ema
 100 1 Bob 104 3 Fli
@@ -1182,18 +1597,23 @@ POSTHOOK: Input: default@test2_n9
 99 2 Mat 102 2 Del
 99 2 Mat 103 2 Ema
Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
-PREHOOK: query: EXPLAIN
+PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 RIGHT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
 OR test2_n9.key between 100 and 102)
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN
+POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 RIGHT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
 OR test2_n9.key between 100 and 102)
 POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
   Stage-0 depends on stages: Stage-1
@@ -1211,16 +1631,34 @@ STAGE PLANS:
                 TableScan
                   alias: test1_n14
                   Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
                   Select Operator
                     expressions: key (type: int), value (type: int), col_1 (type: string)
                     outputColumnNames: _col0, _col1, _col2
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
                     Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
                     Reduce Output Operator
                      sort order: 
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkEmptyKeyOperator
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                      Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
                      value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string)
             Execution mode: vectorized, llap
             LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
        Map 2
            Map Operator Tree:
                TableScan
@@ -1250,6 +1688,12 @@ STAGE PLANS:
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
             Execution mode: llap
             LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                notVectorizedReason: MAPJOIN operator: Non-equi joins not supported
+                vectorized: false

     Stage: Stage-0
       Fetch Operator
@@ -1274,6 +1718,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n14
 POSTHOOK: Input: default@test2_n9
 #### A masked pattern was here ####
+test1_n14.key test1_n14.value test1_n14.col_1 test2_n9.key test2_n9.value test2_n9.col_2
 100 1 Bob 102 2 Del
 101 2 Car 102 2 Del
 101 2 Car 103 2 Ema
@@ -1284,20 +1729,25 @@ POSTHOOK: Input: default@test2_n9
 NULL NULL NULL 104 3 Fli
 NULL NULL NULL 105 NULL None
 NULL NULL None 102 2 Del
-PREHOOK: query: EXPLAIN
+PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 RIGHT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
 AND (test1_n14.key between 100 and 102
 OR test2_n9.key between 100 and 102))
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN
+POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR
 SELECT *
 FROM test1_n14 RIGHT OUTER JOIN test2_n9
 ON (test1_n14.value=test2_n9.value
 AND (test1_n14.key between 100 and 102
 OR test2_n9.key between 100 and 102))
 POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
   Stage-0 depends on stages: Stage-1
@@ -1315,18 +1765,36 @@ STAGE PLANS:
                 TableScan
                   alias: test1_n14
                   Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
+                  TableScan Vectorization:
+                      native: true
                   Select Operator
                     expressions: key (type: int), value (type: int), col_1 (type: string)
                     outputColumnNames: _col0, _col1, _col2
+                    Select Vectorization:
+                        className: VectorSelectOperator
+                        native: true
                     Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
                     Reduce Output Operator
                       key expressions: _col1 (type: int)
                       sort order: +
                       Map-reduce partition columns: _col1 (type: int)
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkLongOperator
+                          native: true
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                       Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE
                       value expressions: _col0 (type: int), _col2 (type: string)
             Execution mode: vectorized, llap
             LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
        Map 2
            Map Operator Tree:
                TableScan
@@ -1356,6 +1824,12 @@ STAGE PLANS:
                             serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
             Execution mode: llap
             LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                notVectorizedReason: MAPJOIN operator: Non-equi joins not supported
+                vectorized: false

     Stage: Stage-0
       Fetch Operator
@@ -1381,26 +1855,32 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@test1_n14
 POSTHOOK: Input: default@test2_n9
 #### A masked pattern was here ####
+test1_n14.key test1_n14.value test1_n14.col_1 test2_n9.key test2_n9.value test2_n9.col_2
 101 2 Car 102 2 Del
 101 2 Car 103 2 Ema
 99 2 Mat 102 2 Del
 NULL NULL NULL 104 3 Fli
 NULL NULL NULL 105 NULL None
VectorSelectOperator + native: true Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string) Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 3 Map Operator Tree: TableScan alias: test2_n9 Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true Select Operator expressions: key (type: int), value (type: int), col_2 (type: string) outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string) Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Reducer 2 Execution mode: llap Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 1 @@ -1462,6 +1978,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Stage: Stage-0 Fetch Operator @@ -1488,6 +2007,7 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n14 POSTHOOK: Input: default@test2_n9 #### A masked pattern was here #### +test1_n14.key test1_n14.value test1_n14.col_1 test2_n9.key test2_n9.value test2_n9.col_2 100 1 Bob 102 2 Del 100 1 Bob 103 2 Ema 100 1 Bob 104 3 Fli @@ -1502,18 +2022,23 @@ POSTHOOK: Input: default@test2_n9 99 2 Mat 103 2 Ema NULL NULL None 102 2 Del Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product -PREHOOK: query: EXPLAIN +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR SELECT * FROM test1_n14 FULL OUTER JOIN test2_n9 ON 
(test1_n14.value=test2_n9.value OR test1_n14.key between 100 and 102) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR SELECT * FROM test1_n14 FULL OUTER JOIN test2_n9 ON (test1_n14.value=test2_n9.value OR test1_n14.key between 100 and 102) POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 depends on stages: Stage-1 @@ -1531,37 +2056,73 @@ STAGE PLANS: TableScan alias: test1_n14 Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true Select Operator expressions: key (type: int), value (type: int), col_1 (type: string) outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string) Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 3 Map Operator Tree: TableScan alias: test2_n9 Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true Select Operator expressions: key (type: int), value (type: int), col_2 (type: string) outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string) Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Reducer 2 Execution mode: llap Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 1 @@ -1575,6 +2136,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Stage: Stage-0 Fetch Operator @@ -1599,6 +2163,7 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n14 POSTHOOK: Input: default@test2_n9 #### A masked pattern was here #### +test1_n14.key test1_n14.value test1_n14.col_1 test2_n9.key test2_n9.value test2_n9.col_2 100 1 Bob 102 2 Del 100 1 Bob 103 2 Ema 100 1 Bob 104 3 Fli @@ -1613,18 +2178,23 @@ POSTHOOK: Input: default@test2_n9 99 2 Mat 103 2 Ema NULL NULL None NULL NULL NULL Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product -PREHOOK: query: EXPLAIN +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR SELECT * FROM test1_n14 FULL OUTER JOIN test2_n9 ON (test1_n14.value=test2_n9.value OR test2_n9.key between 100 and 102) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR SELECT * FROM test1_n14 FULL OUTER JOIN test2_n9 ON (test1_n14.value=test2_n9.value OR test2_n9.key between 100 and 102) POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 depends on stages: Stage-1 @@ -1642,37 +2212,73 @@ STAGE PLANS: TableScan alias: test1_n14 Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true Select Operator expressions: key (type: int), value (type: int), col_1 (type: string) outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string) Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 3 Map Operator Tree: TableScan alias: test2_n9 Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true Select Operator expressions: key (type: int), value (type: int), col_2 (type: string) outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: + Reduce Sink Vectorization: + className: VectorReduceSinkEmptyKeyOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, 
BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string) Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Reducer 2 Execution mode: llap Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 1 @@ -1686,6 +2292,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Stage: Stage-0 Fetch Operator @@ -1710,6 +2319,7 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n14 POSTHOOK: Input: default@test2_n9 #### A masked pattern was here #### +test1_n14.key test1_n14.value test1_n14.col_1 test2_n9.key test2_n9.value test2_n9.col_2 100 1 Bob 102 2 Del 101 2 Car 102 2 Del 101 2 Car 103 2 Ema @@ -1720,20 +2330,25 @@ POSTHOOK: Input: default@test2_n9 NULL NULL NULL 104 3 Fli NULL NULL NULL 105 NULL None NULL NULL None 102 2 Del -PREHOOK: query: EXPLAIN +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR SELECT * FROM test1_n14 FULL OUTER JOIN test2_n9 ON (test1_n14.value=test2_n9.value AND (test1_n14.key between 100 and 102 OR test2_n9.key between 100 and 102)) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR SELECT * FROM test1_n14 FULL OUTER JOIN test2_n9 ON (test1_n14.value=test2_n9.value AND (test1_n14.key between 100 and 102 OR test2_n9.key between 100 and 102)) POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + STAGE DEPENDENCIES: Stage-1 is a root stage Stage-0 depends on stages: Stage-1 @@ -1751,41 +2366,77 @@ STAGE PLANS: TableScan alias: test1_n14 Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true Select Operator expressions: key (type: int), value (type: int), col_1 (type: string) outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col1 (type: int) sort order: + Map-reduce partition columns: _col1 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 6 Data size: 572 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: int), _col2 (type: string) Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + 
featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Map 3 Map Operator Tree: TableScan alias: test2_n9 Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE + TableScan Vectorization: + native: true Select Operator expressions: key (type: int), value (type: int), col_2 (type: string) outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col1 (type: int) sort order: + Map-reduce partition columns: _col1 (type: int) + Reduce Sink Vectorization: + className: VectorReduceSinkLongOperator + native: true + nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true Statistics: Num rows: 4 Data size: 380 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: int), _col2 (type: string) Execution mode: vectorized, llap LLAP IO: no inputs + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: true + usesVectorUDFAdaptor: false + vectorized: true Reducer 2 Execution mode: llap Reduce Operator Tree: Merge Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col1 (type: int) 1 _col1 (type: int) @@ -1799,6 +2450,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Stage: Stage-0 Fetch Operator @@ -1824,6 +2478,7 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n14 POSTHOOK: Input: default@test2_n9 #### A masked pattern was here #### +test1_n14.key test1_n14.value test1_n14.col_1 test2_n9.key test2_n9.value test2_n9.col_2 100 1 Bob NULL NULL NULL 101 2 Car 102 2 Del 101 2 Car 103 2 Ema diff --git ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out index 41ca076b33..0c751db364 100644 --- ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_mapjoin.q.out @@ -56,6 +56,7 @@ STAGE PLANS: className: VectorMapJoinInnerBigOnlyLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1 input vertices: 1 Map 3 diff --git ql/src/test/results/clientpositive/llap/vectorized_mapjoin3.q.out ql/src/test/results/clientpositive/llap/vectorized_mapjoin3.q.out index 8efe78d916..c9b9e81df0 100644 --- ql/src/test/results/clientpositive/llap/vectorized_mapjoin3.q.out +++ 
ql/src/test/results/clientpositive/llap/vectorized_mapjoin3.q.out @@ -146,14 +146,16 @@ STAGE PLANS: 0 _col1 (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [1] - bigTableRetainedColumnNums: [3] - bigTableValueColumnNums: [3] + bigTableKeyColumns: 1:int + bigTableRetainColumnNums: [3] + bigTableValueColumns: 3:decimal(8,1) bigTableValueExpressions: ConvertDecimal64ToDecimal(col 0:decimal(8,1)/DECIMAL_64) -> 3:decimal(8,1) className: VectorMapJoinInnerBigOnlyLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [3] + nonOuterSmallTableKeyMapping: [] + projectedOutput: 3:decimal(8,1) + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0 input vertices: 1 Reducer 3 @@ -217,10 +219,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:int Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int) Execution mode: vectorized, llap @@ -281,10 +282,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE Stage: Stage-0 @@ -351,14 +351,16 @@ STAGE PLANS: 0 _col1 (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [1] - bigTableRetainedColumnNums: [3] - bigTableValueColumnNums: [3] + bigTableKeyColumns: 1:int + bigTableRetainColumnNums: [3] + bigTableValueColumns: 3:decimal(8,1) bigTableValueExpressions: ConvertDecimal64ToDecimal(col 0:decimal(8,1)/DECIMAL_64) -> 3:decimal(8,1) className: VectorMapJoinInnerBigOnlyLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [3] + nonOuterSmallTableKeyMapping: [] + projectedOutput: 3:decimal(8,1) + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0 input vertices: 1 Reducer 3 @@ -422,10 +424,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - 
valueColumnNums: [0] + valueColumns: 0:int Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int) Execution mode: vectorized, llap @@ -486,10 +487,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE Stage: Stage-0 @@ -556,14 +556,16 @@ STAGE PLANS: 0 _col1 (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [1] - bigTableRetainedColumnNums: [3] - bigTableValueColumnNums: [3] + bigTableKeyColumns: 1:int + bigTableRetainColumnNums: [3] + bigTableValueColumns: 3:decimal(8,1) bigTableValueExpressions: ConvertDecimal64ToDecimal(col 0:decimal(8,1)/DECIMAL_64) -> 3:decimal(8,1) className: VectorMapJoinInnerBigOnlyLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [3] + nonOuterSmallTableKeyMapping: [] + projectedOutput: 3:decimal(8,1) + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0 input vertices: 1 Reducer 3 @@ -627,10 +629,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:int Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int) Execution mode: vectorized, llap @@ -691,10 +692,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE Stage: Stage-0 diff --git ql/src/test/results/clientpositive/llap/vectorized_ptf.q.out ql/src/test/results/clientpositive/llap/vectorized_ptf.q.out index 748dea1913..d52e212fa4 100644 --- ql/src/test/results/clientpositive/llap/vectorized_ptf.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_ptf.q.out @@ -155,11 +155,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN 
IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 2:string + valueColumns: 5:int, 7:double Statistics: Num rows: 26 Data size: 6006 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int), p_retailprice (type: double) Execution mode: vectorized, llap @@ -200,7 +200,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -216,7 +216,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -244,7 +244,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -410,10 +410,10 @@ STAGE PLANS: Map-reduce partition columns: p_partkey (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2, 5] + valueColumns: 1:string, 2:string, 5:int Statistics: Num rows: 26 Data size: 5902 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_name (type: string), p_mfgr (type: string), p_size (type: int) Execution mode: vectorized, llap @@ -455,10 +455,9 @@ STAGE PLANS: Map-reduce partition columns: p_partkey (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -494,6 +493,9 @@ STAGE PLANS: Map-reduce partition columns: _col2 (type: string) Statistics: Num rows: 27 Data size: 6021 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col5 (type: int) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: llap Reduce Vectorization: @@ -515,7 +517,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -547,7 +549,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -663,11 +665,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: 
VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -708,7 +710,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -822,11 +824,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 2:string + valueColumns: 5:int, 7:double Statistics: Num rows: 26 Data size: 6006 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int), p_retailprice (type: double) Execution mode: vectorized, llap @@ -867,7 +869,7 @@ STAGE PLANS: Partition table definition input alias: abc name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -883,7 +885,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -911,7 +913,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1074,11 +1076,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -1119,7 +1121,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -1151,7 +1153,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: 
windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1296,11 +1298,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -1341,7 +1343,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -1382,7 +1384,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -1520,11 +1522,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [0, 3, 4, 5, 6, 7, 8] + partitionColumns: 2:string + valueColumns: 0:int, 3:string, 4:string, 5:int, 6:string, 7:double, 8:string Statistics: Num rows: 26 Data size: 16094 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_partkey (type: int), p_brand (type: string), p_type (type: string), p_size (type: int), p_container (type: string), p_retailprice (type: double), p_comment (type: string) Execution mode: vectorized, llap @@ -1566,10 +1568,9 @@ STAGE PLANS: Map-reduce partition columns: p_partkey (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -1609,7 +1610,7 @@ STAGE PLANS: Partition table definition input alias: abc name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col0: int, _col1: string, _col2: string, _col3: string, _col4: string, _col5: int, _col6: string, _col7: double, _col8: string partition by: _col2 raw input shape: @@ -1641,6 +1642,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin 
Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Stage: Stage-0 Fetch Operator @@ -1745,10 +1749,9 @@ STAGE PLANS: Map-reduce partition columns: p_partkey (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -1781,11 +1784,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [0, 3, 4, 5, 6, 7, 8] + partitionColumns: 2:string + valueColumns: 0:int, 3:string, 4:string, 5:int, 6:string, 7:double, 8:string Statistics: Num rows: 26 Data size: 16094 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_partkey (type: int), p_brand (type: string), p_type (type: string), p_size (type: int), p_container (type: string), p_retailprice (type: double), p_comment (type: string) Execution mode: vectorized, llap @@ -1827,6 +1830,9 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 4 Execution mode: llap Reduce Vectorization: @@ -1848,7 +1854,7 @@ STAGE PLANS: Partition table definition input alias: abc name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col0: int, _col1: string, _col2: string, _col3: string, _col4: string, _col5: int, _col6: string, _col7: double, _col8: string partition by: _col2 raw input shape: @@ -1958,7 +1964,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: p_name ASC NULLS FIRST, p_size DESC NULLS LAST + order by: p_name ASC NULLS LAST, p_size DESC NULLS LAST output shape: p_name: string, p_mfgr: string, p_size: int partition by: p_mfgr raw input shape: @@ -1999,7 +2005,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col1 ASC NULLS FIRST, _col5 DESC NULLS LAST + order by: _col1 ASC NULLS LAST, _col5 DESC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -2015,7 +2021,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aaz + reduceColumnNullOrder: azz reduceColumnSortOrder: ++- allNative: false usesVectorUDFAdaptor: false @@ -2043,7 +2049,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST, _col5 DESC NULLS 
LAST + order by: _col1 ASC NULLS LAST, _col5 DESC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -2184,7 +2190,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: p_name ASC NULLS FIRST + order by: p_name ASC NULLS LAST output shape: p_name: string, p_mfgr: string, p_size: int, p_retailprice: double partition by: p_mfgr raw input shape: @@ -2226,7 +2232,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -2243,7 +2249,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -2271,7 +2277,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -2430,11 +2436,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 2:string + valueColumns: 5:int, 7:double Statistics: Num rows: 26 Data size: 6006 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int), p_retailprice (type: double) Execution mode: vectorized, llap @@ -2475,7 +2481,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -2491,7 +2497,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -2519,7 +2525,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -2681,11 +2687,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 2:string + valueColumns: 5:int, 7:double Statistics: Num rows: 26 Data size: 6006 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int), 
p_retailprice (type: double) Execution mode: vectorized, llap @@ -2726,7 +2732,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -2740,7 +2746,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -2748,7 +2754,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -2781,7 +2787,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -2789,7 +2795,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -2805,7 +2811,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -2833,7 +2839,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -3002,11 +3008,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 2:string + valueColumns: 5:int, 7:double Statistics: Num rows: 26 Data size: 6006 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int), p_retailprice (type: double) Execution mode: vectorized, llap @@ -3047,7 +3053,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -3079,7 +3085,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -3224,11 +3230,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) 
Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [0, 5, 7] + partitionColumns: 2:string + valueColumns: 0:int, 5:int, 7:double Statistics: Num rows: 26 Data size: 6110 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_partkey (type: int), p_size (type: int), p_retailprice (type: double) Execution mode: vectorized, llap @@ -3270,10 +3276,9 @@ STAGE PLANS: Map-reduce partition columns: p_partkey (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 26 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE Execution mode: vectorized, llap LLAP IO: all inputs @@ -3313,7 +3318,7 @@ STAGE PLANS: Partition table definition input alias: abc name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col0: int, _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -3344,6 +3349,9 @@ STAGE PLANS: Map-reduce partition columns: _col2 (type: string) Statistics: Num rows: 27 Data size: 6237 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col5 (type: int), _col7 (type: double) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 4 Execution mode: llap Reduce Vectorization: @@ -3365,7 +3373,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -3518,11 +3526,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -3563,7 +3571,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -3756,11 +3764,11 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS 
true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [2] + partitionColumns: 0:string + valueColumns: 2:double Statistics: Num rows: 13 Data size: 2574 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col2 (type: double) Execution mode: vectorized, llap @@ -3807,7 +3815,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col0 ASC NULLS FIRST + order by: _col0 ASC NULLS LAST output shape: _col0: string, _col1: string, _col2: double partition by: _col0 raw input shape: @@ -3839,7 +3847,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -4019,11 +4027,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 2:string + valueColumns: 5:int, 7:double Statistics: Num rows: 26 Data size: 6006 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int), p_retailprice (type: double) Execution mode: vectorized, llap @@ -4064,7 +4072,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -4102,7 +4110,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -4193,7 +4201,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -4235,7 +4243,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col3 ASC NULLS FIRST, _col2 ASC NULLS FIRST + order by: _col3 ASC NULLS LAST, _col2 ASC NULLS LAST partition by: _col3 raw input shape: window functions: @@ -4536,10 +4544,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [2] + keyColumns: 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 5] + valueColumns: 1:string, 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_name (type: string), p_size (type: int) Execution mode: vectorized, llap @@ -4580,14 +4588,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS 
FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -4601,7 +4609,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4609,7 +4617,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4642,7 +4650,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4650,7 +4658,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4885,10 +4893,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [2] + keyColumns: 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 5] + valueColumns: 1:string, 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_name (type: string), p_size (type: int) Execution mode: vectorized, llap @@ -4929,14 +4937,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -4968,7 +4976,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -5000,7 +5008,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -5016,7 +5024,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az 
reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -5044,7 +5052,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -5231,10 +5239,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string), p_name (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [5] + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -5275,14 +5283,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -5314,14 +5322,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -5337,7 +5345,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -5365,7 +5373,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -5553,10 +5561,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string), p_name (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [5] + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -5597,14 +5605,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: 
string, _col5: int partition by: _col2, _col1 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -5636,7 +5644,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -5650,7 +5658,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -5684,7 +5692,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -5917,10 +5925,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string), p_name (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [5] + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -5961,7 +5969,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -5975,7 +5983,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -5983,7 +5991,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -6016,7 +6024,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -6024,7 +6032,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -6040,7 +6048,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: zz reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ 
-6068,7 +6076,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST partition by: _col2, _col1 raw input shape: window functions: @@ -6250,10 +6258,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string), p_name (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [5] + valueColumns: 5:int Statistics: Num rows: 26 Data size: 5798 Basic stats: COMPLETE Column stats: COMPLETE value expressions: p_size (type: int) Execution mode: vectorized, llap @@ -6294,14 +6302,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -6315,7 +6323,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -6349,7 +6357,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -6366,7 +6374,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -6394,7 +6402,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/llap/vectorized_shufflejoin.q.out ql/src/test/results/clientpositive/llap/vectorized_shufflejoin.q.out index 6421650e39..7142732099 100644 --- ql/src/test/results/clientpositive/llap/vectorized_shufflejoin.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_shufflejoin.q.out @@ -134,6 +134,9 @@ STAGE PLANS: sort order: Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: bigint), _col1 (type: int), _col2 (type: int), _col3 (type: bigint), _col4 (type: bigint) + MergeJoin Vectorization: + enabled: false + enableConditionsNotMet: Vectorizing MergeJoin Supported IS false Reducer 3 Execution mode: vectorized, llap Reduce Vectorization: diff --git 
ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out index aa4d8889ca..384bf61f27 100644 --- ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out @@ -155,10 +155,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1] + valueColumns: 0:timestamp, 1:timestamp Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: timestamp), _col1 (type: timestamp) Execution mode: vectorized, llap @@ -381,10 +380,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1] + valueColumns: 0:double, 1:bigint Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: double), _col1 (type: bigint) Execution mode: vectorized, llap @@ -522,10 +520,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2] + valueColumns: 0:double, 1:double, 2:bigint Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col0 (type: double), _col1 (type: double), _col2 (type: bigint) Execution mode: vectorized, llap diff --git ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out index 9033b885b2..2204f26663 100644 --- ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out +++ ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out @@ -259,7 +259,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_string - Statistics: Num rows: 52 Data size: 4356 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 52 Data size: 4276 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true Select Operator @@ -270,7 +270,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [5, 6, 7, 8, 9, 10, 11, 12, 0, 1, 3, 13, 14, 15, 16, 17] selectExpressions: VectorUDFUnixTimeStampTimestamp(col 1:timestamp) -> 5:bigint, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 6:int, VectorUDFMonthTimestamp(col 1:timestamp, field MONTH) -> 7:int, VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 8:int, VectorUDFWeekOfYearTimestamp(col 1:timestamp, field WEEK_OF_YEAR) -> 9:int, VectorUDFHourTimestamp(col 1:timestamp, field HOUR_OF_DAY) -> 10:int, VectorUDFMinuteTimestamp(col 1:timestamp, 
field MINUTE) -> 11:int, VectorUDFSecondTimestamp(col 1:timestamp, field SECOND) -> 12:int, IfExprTimestampColumnScalar(col 0:boolean, col 1:timestamp, val 1319-01-25 08:31:57.778) -> 13:timestamp, IfExprTimestampScalarColumn(col 0:boolean, val 2000-12-18 00:42:30.0005, col 1:timestamp) -> 14:timestamp, IfExprTimestampColumnColumn(col 0:boolean, col 1:timestampcol 3:timestamp) -> 15:timestamp, IfExprColumnNull(col 0:boolean, col 1:timestamp, null)(children: col 0:boolean, col 1:timestamp) -> 16:timestamp, IfExprNullColumn(col 0:boolean, null, col 3)(children: col 0:boolean, col 3:timestamp) -> 17:timestamp - Statistics: Num rows: 52 Data size: 16836 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 52 Data size: 16756 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: bigint) sort order: + @@ -278,7 +278,7 @@ STAGE PLANS: className: VectorReduceSinkObjectHashOperator native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - Statistics: Num rows: 52 Data size: 16836 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 52 Data size: 16756 Basic stats: COMPLETE Column stats: COMPLETE value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int), _col9 (type: boolean), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp), _col13 (type: timestamp), _col14 (type: timestamp), _col15 (type: timestamp), _col16 (type: timestamp) Execution mode: vectorized, llap LLAP IO: all inputs @@ -307,13 +307,13 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2, 3, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] - Statistics: Num rows: 52 Data size: 16836 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 52 Data size: 16756 Basic stats: COMPLETE Column stats: COMPLETE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 52 Data size: 16836 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 52 Data size: 16756 Basic stats: COMPLETE Column stats: COMPLETE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -371,6 +371,46 @@ ORDER BY c1 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc_string #### A masked pattern was here #### +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.628 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:43.628 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.637 1969-12-31 15:59:58.174 1319-02-02 16:31:57.778 1969-12-31 23:59:43.637 1969-12-31 15:59:58.174 NULL 1969-12-31 15:59:58.174 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.64 1969-12-31 15:59:58.174 1319-02-02 16:31:57.778 1969-12-31 23:59:43.64 1969-12-31 15:59:58.174 NULL 1969-12-31 15:59:58.174 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.661 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:43.661 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.676 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 
1969-12-31 23:59:43.676 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.705 1969-12-31 15:59:58.456 1319-02-02 16:31:57.778 1969-12-31 23:59:43.705 1969-12-31 15:59:58.456 NULL 1969-12-31 15:59:58.456 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.709 1969-12-31 15:59:58.456 1319-02-02 16:31:57.778 1969-12-31 23:59:43.709 1969-12-31 15:59:58.456 NULL 1969-12-31 15:59:58.456 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.72 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:43.72 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.721 1969-12-31 15:59:58.456 1319-02-02 16:31:57.778 1969-12-31 23:59:43.721 1969-12-31 15:59:58.456 NULL 1969-12-31 15:59:58.456 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.749 1969-12-31 15:59:58.174 1319-02-02 16:31:57.778 1969-12-31 23:59:43.749 1969-12-31 15:59:58.174 NULL 1969-12-31 15:59:58.174 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.771 1969-12-31 15:59:58.456 1319-02-02 16:31:57.778 1969-12-31 23:59:43.771 1969-12-31 15:59:58.456 NULL 1969-12-31 15:59:58.456 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.773 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:43.773 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.782 1969-12-31 15:59:58.174 1319-02-02 16:31:57.778 1969-12-31 23:59:43.782 1969-12-31 15:59:58.174 NULL 1969-12-31 15:59:58.174 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.783 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:43.783 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.807 1969-12-31 15:59:58.174 1319-02-02 16:31:57.778 1969-12-31 23:59:43.807 1969-12-31 15:59:58.174 NULL 1969-12-31 15:59:58.174 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.82 1969-12-31 15:59:58.174 1319-02-02 16:31:57.778 1969-12-31 23:59:43.82 1969-12-31 15:59:58.174 NULL 1969-12-31 15:59:58.174 +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.619 1969-12-31 16:00:14.793 1969-12-31 23:59:43.619 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.619 1969-12-31 23:59:43.619 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.627 1969-12-31 16:00:03.679 1969-12-31 23:59:43.627 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.627 1969-12-31 23:59:43.627 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.631 1969-12-31 16:00:06.612 1969-12-31 23:59:43.631 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.631 1969-12-31 23:59:43.631 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.642 1969-12-31 16:00:04.424 1969-12-31 23:59:43.642 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.642 1969-12-31 23:59:43.642 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.643 1969-12-31 16:00:11.764 1969-12-31 23:59:43.643 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.643 1969-12-31 23:59:43.643 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.645 1969-12-31 16:00:00.077 1969-12-31 23:59:43.645 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.645 1969-12-31 23:59:43.645 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.661 1969-12-31 15:59:58.732 1969-12-31 23:59:43.661 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.661 1969-12-31 23:59:43.661 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.689 1969-12-31 15:59:46.848 1969-12-31 23:59:43.689 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.689 
1969-12-31 23:59:43.689 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.695 1969-12-31 16:00:06.867 1969-12-31 23:59:43.695 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.695 1969-12-31 23:59:43.695 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.707 1969-12-31 15:59:56.965 1969-12-31 23:59:43.707 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.707 1969-12-31 23:59:43.707 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.71 1969-12-31 16:00:00.687 1969-12-31 23:59:43.71 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.71 1969-12-31 23:59:43.71 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.723 1969-12-31 16:00:03.375 1969-12-31 23:59:43.723 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.723 1969-12-31 23:59:43.723 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.745 1969-12-31 16:00:04.052 1969-12-31 23:59:43.745 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.745 1969-12-31 23:59:43.745 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.764 1969-12-31 16:00:10.52 1969-12-31 23:59:43.764 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.764 1969-12-31 23:59:43.764 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.775 1969-12-31 15:59:48.003 1969-12-31 23:59:43.775 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.775 1969-12-31 23:59:43.775 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.779 1969-12-31 15:59:53.274 1969-12-31 23:59:43.779 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.779 1969-12-31 23:59:43.779 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.785 1969-12-31 16:00:14.096 1969-12-31 23:59:43.785 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.785 1969-12-31 23:59:43.785 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.792 1969-12-31 15:59:52.041 1969-12-31 23:59:43.792 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.792 1969-12-31 23:59:43.792 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.793 1969-12-31 15:59:56.316 1969-12-31 23:59:43.793 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.793 1969-12-31 23:59:43.793 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.811 1969-12-31 16:00:00.479 1969-12-31 23:59:43.811 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.811 1969-12-31 23:59:43.811 NULL +-29 1969 12 31 31 1 23 59 30 NULL 1969-12-31 23:59:30.929 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:30.929 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-29 1969 12 31 31 1 23 59 30 NULL 1969-12-31 23:59:30.929 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:30.929 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-29 1969 12 31 31 1 23 59 30 NULL 1969-12-31 23:59:30.929 1969-12-31 15:59:58.174 1319-02-02 16:31:57.778 1969-12-31 23:59:30.929 1969-12-31 15:59:58.174 NULL 1969-12-31 15:59:58.174 +-29 1969 12 31 31 1 23 59 30 NULL 1969-12-31 23:59:30.929 1969-12-31 15:59:58.456 1319-02-02 16:31:57.778 1969-12-31 23:59:30.929 1969-12-31 15:59:58.456 NULL 1969-12-31 15:59:58.456 -45479202281 528 10 25 25 43 8 15 18 true 0528-10-27 08:15:18.941718273 NULL 0528-10-27 08:15:18.941718273 2000-12-18 08:42:30.0005 0528-10-27 08:15:18.941718273 0528-10-27 08:15:18.941718273 NULL 1632453512 2021 9 24 24 38 3 18 32 NULL 2021-09-24 03:18:32.4 1974-10-04 17:21:03.989 1319-02-02 16:31:57.778 2021-09-24 03:18:32.4 1974-10-04 17:21:03.989 NULL 1974-10-04 17:21:03.989 1632453512 2021 9 24 24 38 3 18 32 false 2021-09-24 03:18:32.4 1999-10-03 16:59:10.396903939 1319-02-02 16:31:57.778 2021-09-24 03:18:32.4 1999-10-03 16:59:10.396903939 NULL 1999-10-03 
16:59:10.396903939 @@ -380,47 +420,7 @@ POSTHOOK: Input: default@alltypesorc_string 163809583224 7160 12 2 2 48 6 0 24 NULL 7160-12-02 06:00:24.81200852 NULL 1319-02-02 16:31:57.778 7160-12-02 06:00:24.81200852 NULL NULL NULL 490699811 1985 7 20 20 29 9 30 11 true 1985-07-20 09:30:11 1319-02-02 16:31:57.778 1985-07-20 09:30:11 2000-12-18 08:42:30.0005 1985-07-20 09:30:11 1985-07-20 09:30:11 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL 1319-02-02 16:31:57.778 NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 15:59:44.028 1319-02-02 16:31:57.778 NULL 1969-12-31 15:59:44.028 NULL 1969-12-31 15:59:44.028 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 15:59:44.809 1319-02-02 16:31:57.778 NULL 1969-12-31 15:59:44.809 NULL 1969-12-31 15:59:44.809 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 15:59:50.531 1319-02-02 16:31:57.778 NULL 1969-12-31 15:59:50.531 NULL 1969-12-31 15:59:50.531 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 15:59:51.009 1319-02-02 16:31:57.778 NULL 1969-12-31 15:59:51.009 NULL 1969-12-31 15:59:51.009 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 15:59:53.761 1319-02-02 16:31:57.778 NULL 1969-12-31 15:59:53.761 NULL 1969-12-31 15:59:53.761 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:00.905 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:00.905 NULL 1969-12-31 16:00:00.905 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:03.586 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:03.586 NULL 1969-12-31 16:00:03.586 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:05.227 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:05.227 NULL 1969-12-31 16:00:05.227 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:05.535 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:05.535 NULL 1969-12-31 16:00:05.535 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:07.02 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:07.02 NULL 1969-12-31 16:00:07.02 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:07.365 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:07.365 NULL 1969-12-31 16:00:07.365 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:07.517 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:07.517 NULL 1969-12-31 16:00:07.517 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:07.767 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:07.767 NULL 1969-12-31 16:00:07.767 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:08.602 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:08.602 NULL 1969-12-31 16:00:08.602 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:09.938 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:09.938 NULL 1969-12-31 16:00:09.938 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:14.214 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:14.214 NULL 1969-12-31 16:00:14.214 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:14.783 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:14.783 NULL 1969-12-31 16:00:14.783 NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL NULL 1319-02-02 16:31:57.778 NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL NULL 1319-02-02 16:31:57.778 NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 
15:59:43.773 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:44.262 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:44.568 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:47.351 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:47.446 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:48.023 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:48.629 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:49.177 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:49.208 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:50.789 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:51.245 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:52.372 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:55.249 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:00.661 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:00.784 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:01.836 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:09.313 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:09.538 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:09.986 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:11.031 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:11.465 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:13.589 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 2024-11-11 16:42:41.101 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL NULL NULL 2000-12-18 08:42:30.0005 NULL NULL NULL PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT @@ -469,7 +469,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_string - Statistics: Num rows: 52 Data size: 1017 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 52 Data size: 5537 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true Select Operator @@ -565,7 +565,47 @@ ORDER BY c1 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc_string #### A masked pattern was here #### +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 
31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 -2736272726 1883 4 17 17 16 4 14 34 +-30 1969 12 31 31 1 23 59 30 +-30 1969 12 31 31 1 23 59 30 +-30 1969 12 31 31 1 23 59 30 +-30 1969 12 31 31 1 23 59 30 -62018199211 4 9 24 22 39 18 26 29 1365554626 2013 4 10 10 15 0 43 46 206730996125 8521 1 16 16 3 20 42 5 @@ -577,46 +617,6 @@ NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT to_unix_timestamp(ctimestamp1) = to_unix_timestamp(stimestamp1) AS c1, year(ctimestamp1) = year(stimestamp1), @@ -663,7 +663,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_string - Statistics: Num rows: 52 Data size: 3097 Basic 
stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 52 Data size: 7617 Basic stats: COMPLETE Column stats: COMPLETE TableScan Vectorization: native: true Select Operator @@ -767,50 +767,50 @@ NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL false false false false false false false false false false false false false false false false false false false false false false false false false false false false false false false false false false false false +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true 
true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT to_unix_timestamp(stimestamp1) AS c1, year(stimestamp1), @@ -1088,7 +1088,7 @@ FROM alltypesorc_string POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc_string #### A masked pattern was here #### -0528-10-27 08:15:18.941718273 7160-12-02 06:00:24.81200852 8 52 +0528-10-27 08:15:18.941718273 7160-12-02 06:00:24.81200852 48 52 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT round(sum(ctimestamp1), 3) FROM alltypesorc_string @@ -1218,7 +1218,7 @@ FROM alltypesorc_string POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc_string #### A masked pattern was here #### -2.89160478029166E11 +2.891604773267E11 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT round(avg(ctimestamp1), 0), variance(ctimestamp1) between 8.97077295279421E19 and 8.97077295279422E19, @@ -1377,4 +1377,4 @@ FROM alltypesorc_string POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc_string #### A masked pattern was here #### -3.6145059754E10 false false false 7.5245178084814E10 7.5245178084814E10 7.5245178084814E10 8.0440478971476E10 +6.024176611E9 false false false 3.3542405863247E10 3.3542405863247E10 3.3542405863247E10 3.3897361841912E10 diff --git ql/src/test/results/clientpositive/llap/windowing.q.out ql/src/test/results/clientpositive/llap/windowing.q.out index d752941831..d0ca305122 100644 --- ql/src/test/results/clientpositive/llap/windowing.q.out +++ ql/src/test/results/clientpositive/llap/windowing.q.out @@ -1859,7 +1859,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/llap/windowing_gby.q.out ql/src/test/results/clientpositive/llap/windowing_gby.q.out index 4fa1fb9291..3ee0d375bb 100644 --- ql/src/test/results/clientpositive/llap/windowing_gby.q.out +++ ql/src/test/results/clientpositive/llap/windowing_gby.q.out @@ -26,7 +26,7 @@ Stage-0 Select Operator [SEL_46] (rows=2 width=4) Output:["_col0"] PTF Operator [PTF_45] (rows=2 width=16) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"(UDFToDouble(_col1) / UDFToDouble(_col2)) ASC NULLS FIRST","partition by:":"0"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"(UDFToDouble(_col1) / UDFToDouble(_col2)) ASC NULLS LAST","partition by:":"0"}] Select Operator [SEL_44] (rows=2 width=16) Output:["_col1","_col2"] <-Reducer 3 [SIMPLE_EDGE] vectorized, llap diff --git ql/src/test/results/clientpositive/localtimezone.q.out ql/src/test/results/clientpositive/localtimezone.q.out index b4c6d86247..d63309000e 100644 --- ql/src/test/results/clientpositive/localtimezone.q.out +++ 
ql/src/test/results/clientpositive/localtimezone.q.out @@ -555,12 +555,12 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamptz_test2 #### A masked pattern was here #### cable 0.0 1 -pin NULL 1 -hdmi 1.25 2 -keyboard 2 3.9 3 -keyboard 3 3.99 4 -keyboard 1 4.2 5 -laptop 2 10.0 6 +hdmi 1.25 1 +keyboard 2 3.9 2 +keyboard 3 3.99 3 +keyboard 1 4.2 4 +laptop 2 10.0 5 +pin NULL 6 mouse 1 3.1 1 mouse 2 4.594 2 laptop 1 9.2 3 diff --git ql/src/test/results/clientpositive/manyViewJoin.q.out ql/src/test/results/clientpositive/manyViewJoin.q.out new file mode 100644 index 0000000000..9958f6f5d0 --- /dev/null +++ ql/src/test/results/clientpositive/manyViewJoin.q.out @@ -0,0 +1,26043 @@ +PREHOOK: query: drop table if exists test_hive_1035 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1035 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1035 +( + test_hive_1018 string + ,test_hive_1004 string + ,test_hive_1025 string + ,test_hive_23 string + ,test_hive_27 string + ,test_hive_29 string + ,test_hive_30 string + ,test_hive_97 string + ,test_hive_96 string + ,test_hive_98 string + ,test_hive_101 string + ,test_hive_102 string + ,test_hive_109 string + ,test_hive_111 string + ,test_hive_112 string + ,test_hive_113 string + ,test_hive_114 string + ,test_hive_115 string + ,test_hive_78 string + ,test_hive_79 string + ,test_hive_24 string + ,test_hive_26 string + ,test_hive_110 string + ,test_hive_77 string + ,test_hive_87 string + ,test_hive_92 string + ,test_hive_90 string + ,test_hive_74 string + ,test_hive_85 string + ,test_hive_81 string + ,test_hive_82 string + ,test_hive_106 string + ,test_hive_107 string + ,test_hive_108 string + ,test_hive_75 string + ,test_hive_86 string + ,test_hive_76 string + ,test_hive_89 string + ,test_hive_88 string + ,test_hive_91 string + ,test_hive_71 string + ,test_hive_72 string + ,test_hive_73 string + ,test_hive_80 string + ,test_hive_103 string + ,test_hive_104 string + ,test_hive_1002 string + ,test_hive_1003 string + ,test_hive_25 string + ,test_hive_28 string + ,test_hive_93 string + ,test_hive_94 string + ,test_hive_95 string + ,test_hive_99 string + ,test_hive_105 string + ,test_hive_83 string + ,test_hive_84 string + ,test_hive_100 string + ,test_hive_1023 string + ,test_hive_1024 string + ,test_hive_1010 string + ,test_hive_1010_a_d string + ,test_hive_1010_a_g string + ,test_hive_1026 string + ,test_hive_1000 string + ,test_hive_1001 string + ,test_hive_1030 string + ,test_hive_1030_1 string + ,test_hive_1030_2 string + ,test_hive_1030_3 string + ,test_hive_1021 string + ,test_hive_1020 string + ,test_hive_1022 string + ,test_hive_1019 string + ,test_hive_1027 string + ,test_hive_1028 string + ,test_hive_1029 string + ,test_hive_1005 string + ,test_hive_1005_a_d string + ,test_hive_1005_psr string + ,test_hive_1005_psr_a_d string + ,test_hive_1005_psr_e string + ,test_hive_1013 string + ,test_hive_1013_a_d string + ,test_hive_1013_psr string + ,test_hive_1013_psr_a_d string + ,test_hive_1013_psr_e string + ,test_hive_1034 string +) +partitioned by (ds int, ts int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1035 +POSTHOOK: query: create table test_hive_1035 +( + test_hive_1018 string + ,test_hive_1004 string + ,test_hive_1025 string + ,test_hive_23 string + ,test_hive_27 string + ,test_hive_29 string + ,test_hive_30 string + ,test_hive_97 string + ,test_hive_96 string + ,test_hive_98 string + ,test_hive_101 string + 
,test_hive_102 string + ,test_hive_109 string + ,test_hive_111 string + ,test_hive_112 string + ,test_hive_113 string + ,test_hive_114 string + ,test_hive_115 string + ,test_hive_78 string + ,test_hive_79 string + ,test_hive_24 string + ,test_hive_26 string + ,test_hive_110 string + ,test_hive_77 string + ,test_hive_87 string + ,test_hive_92 string + ,test_hive_90 string + ,test_hive_74 string + ,test_hive_85 string + ,test_hive_81 string + ,test_hive_82 string + ,test_hive_106 string + ,test_hive_107 string + ,test_hive_108 string + ,test_hive_75 string + ,test_hive_86 string + ,test_hive_76 string + ,test_hive_89 string + ,test_hive_88 string + ,test_hive_91 string + ,test_hive_71 string + ,test_hive_72 string + ,test_hive_73 string + ,test_hive_80 string + ,test_hive_103 string + ,test_hive_104 string + ,test_hive_1002 string + ,test_hive_1003 string + ,test_hive_25 string + ,test_hive_28 string + ,test_hive_93 string + ,test_hive_94 string + ,test_hive_95 string + ,test_hive_99 string + ,test_hive_105 string + ,test_hive_83 string + ,test_hive_84 string + ,test_hive_100 string + ,test_hive_1023 string + ,test_hive_1024 string + ,test_hive_1010 string + ,test_hive_1010_a_d string + ,test_hive_1010_a_g string + ,test_hive_1026 string + ,test_hive_1000 string + ,test_hive_1001 string + ,test_hive_1030 string + ,test_hive_1030_1 string + ,test_hive_1030_2 string + ,test_hive_1030_3 string + ,test_hive_1021 string + ,test_hive_1020 string + ,test_hive_1022 string + ,test_hive_1019 string + ,test_hive_1027 string + ,test_hive_1028 string + ,test_hive_1029 string + ,test_hive_1005 string + ,test_hive_1005_a_d string + ,test_hive_1005_psr string + ,test_hive_1005_psr_a_d string + ,test_hive_1005_psr_e string + ,test_hive_1013 string + ,test_hive_1013_a_d string + ,test_hive_1013_psr string + ,test_hive_1013_psr_a_d string + ,test_hive_1013_psr_e string + ,test_hive_1034 string +) +partitioned by (ds int, ts int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1035 +PREHOOK: query: create table if not exists test_hive_1038 +( + test_hive_1018 string + ,test_hive_1004 string + ,test_hive_1025 string + ,test_hive_23 string + ,test_hive_27 string + ,test_hive_29 string + ,test_hive_30 string + ,test_hive_97 string + ,test_hive_96 string + ,test_hive_98 string + ,test_hive_101 string + ,test_hive_102 string + ,test_hive_109 string + ,test_hive_111 string + ,test_hive_112 string + ,test_hive_113 string + ,test_hive_114 string + ,test_hive_115 string + ,test_hive_78 string + ,test_hive_79 string + ,test_hive_24 string + ,test_hive_26 string + ,test_hive_110 string + ,test_hive_77 string + ,test_hive_87 string + ,test_hive_92 string + ,test_hive_90 string + ,test_hive_74 string + ,test_hive_85 string + ,test_hive_81 string + ,test_hive_82 string + ,test_hive_106 string + ,test_hive_107 string + ,test_hive_108 string + ,test_hive_75 string + ,test_hive_86 string + ,test_hive_76 string + ,test_hive_89 string + ,test_hive_88 string + ,test_hive_91 string + ,test_hive_71 string + ,test_hive_72 string + ,test_hive_73 string + ,test_hive_80 string + ,test_hive_103 string + ,test_hive_104 string + ,test_hive_1002 string + ,test_hive_1003 string + ,test_hive_25 string + ,test_hive_28 string + ,test_hive_93 string + ,test_hive_94 string + ,test_hive_95 string + ,test_hive_99 string + ,test_hive_105 string + ,test_hive_83 string + ,test_hive_84 string + ,test_hive_100 string + ,test_hive_1023 string + ,test_hive_1024 string + 
,test_hive_1010 string + ,test_hive_1010_a_d string + ,test_hive_1010_a_g string + ,test_hive_1026 string + ,test_hive_1000 string + ,test_hive_1001 string + ,test_hive_1030 string + ,test_hive_1030_1 string + ,test_hive_1030_2 string + ,test_hive_1030_3 string + ,test_hive_1021 string + ,test_hive_1020 string + ,test_hive_1022 string + ,test_hive_1019 string + ,test_hive_1027 string + ,test_hive_1028 string + ,test_hive_1029 string + ,test_hive_1005 string + ,test_hive_1005_a_d string + ,test_hive_1005_psr string + ,test_hive_1005_psr_a_d string + ,test_hive_1005_psr_e string + ,test_hive_1013 string + ,test_hive_1013_a_d string + ,test_hive_1013_psr string + ,test_hive_1013_psr_a_d string + ,test_hive_1013_psr_e string + ,test_hive_1034 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1038 +POSTHOOK: query: create table if not exists test_hive_1038 +( + test_hive_1018 string + ,test_hive_1004 string + ,test_hive_1025 string + ,test_hive_23 string + ,test_hive_27 string + ,test_hive_29 string + ,test_hive_30 string + ,test_hive_97 string + ,test_hive_96 string + ,test_hive_98 string + ,test_hive_101 string + ,test_hive_102 string + ,test_hive_109 string + ,test_hive_111 string + ,test_hive_112 string + ,test_hive_113 string + ,test_hive_114 string + ,test_hive_115 string + ,test_hive_78 string + ,test_hive_79 string + ,test_hive_24 string + ,test_hive_26 string + ,test_hive_110 string + ,test_hive_77 string + ,test_hive_87 string + ,test_hive_92 string + ,test_hive_90 string + ,test_hive_74 string + ,test_hive_85 string + ,test_hive_81 string + ,test_hive_82 string + ,test_hive_106 string + ,test_hive_107 string + ,test_hive_108 string + ,test_hive_75 string + ,test_hive_86 string + ,test_hive_76 string + ,test_hive_89 string + ,test_hive_88 string + ,test_hive_91 string + ,test_hive_71 string + ,test_hive_72 string + ,test_hive_73 string + ,test_hive_80 string + ,test_hive_103 string + ,test_hive_104 string + ,test_hive_1002 string + ,test_hive_1003 string + ,test_hive_25 string + ,test_hive_28 string + ,test_hive_93 string + ,test_hive_94 string + ,test_hive_95 string + ,test_hive_99 string + ,test_hive_105 string + ,test_hive_83 string + ,test_hive_84 string + ,test_hive_100 string + ,test_hive_1023 string + ,test_hive_1024 string + ,test_hive_1010 string + ,test_hive_1010_a_d string + ,test_hive_1010_a_g string + ,test_hive_1026 string + ,test_hive_1000 string + ,test_hive_1001 string + ,test_hive_1030 string + ,test_hive_1030_1 string + ,test_hive_1030_2 string + ,test_hive_1030_3 string + ,test_hive_1021 string + ,test_hive_1020 string + ,test_hive_1022 string + ,test_hive_1019 string + ,test_hive_1027 string + ,test_hive_1028 string + ,test_hive_1029 string + ,test_hive_1005 string + ,test_hive_1005_a_d string + ,test_hive_1005_psr string + ,test_hive_1005_psr_a_d string + ,test_hive_1005_psr_e string + ,test_hive_1013 string + ,test_hive_1013_a_d string + ,test_hive_1013_psr string + ,test_hive_1013_psr_a_d string + ,test_hive_1013_psr_e string + ,test_hive_1034 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1038 +PREHOOK: query: drop table if exists test_hive_1037 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop 
table if exists test_hive_1037 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1037 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1037 +POSTHOOK: query: create table if not exists test_hive_1037 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1037 +PREHOOK: query: drop view if exists test_hive_1040 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1040 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1040 +as +select + cast(test_hive_1018 as int) as test_hive_1018 + ,cast(test_hive_1004 as int) as test_hive_1004 + ,cast(test_hive_1025 as int) as test_hive_1025 + ,cast(test_hive_23 as string) as test_hive_23 + ,cast(test_hive_27 as string) as test_hive_27 + ,cast(test_hive_29 as string) as test_hive_29 + ,cast(test_hive_30 as string) as test_hive_30 + ,cast(test_hive_97 as string) as test_hive_97 + ,cast(test_hive_96 as string) as test_hive_96 + ,cast(test_hive_98 as string) as test_hive_98 + ,cast(test_hive_101 as string) as test_hive_101 + ,cast(test_hive_102 as string) as test_hive_102 + ,cast(test_hive_109 as string) as test_hive_109 + ,cast(test_hive_111 as string) as test_hive_111 + ,cast(test_hive_112 as string) as test_hive_112 + ,cast(test_hive_113 as string) as test_hive_113 + ,cast(test_hive_114 as string) as test_hive_114 + ,cast(test_hive_115 as string) as test_hive_115 + ,cast(test_hive_78 as string) as test_hive_78 + ,cast(test_hive_79 as string) as test_hive_79 + ,cast(test_hive_24 as string) as test_hive_24 + ,cast(test_hive_26 as string) as test_hive_26 + ,cast(test_hive_110 as string) as test_hive_110 + ,cast(test_hive_77 as string) as test_hive_77 + ,cast(test_hive_87 as string) as test_hive_87 + ,cast(test_hive_92 as string) as test_hive_92 + ,cast(test_hive_90 as string) as test_hive_90 + ,cast(test_hive_74 as string) as test_hive_74 + ,cast(test_hive_85 as string) as test_hive_85 + ,cast(test_hive_81 as string) as test_hive_81 + ,cast(test_hive_82 as string) as test_hive_82 + ,cast(test_hive_106 as string) as test_hive_106 + ,cast(test_hive_107 as string) as test_hive_107 + ,cast(test_hive_108 as string) as test_hive_108 + ,cast(test_hive_75 as string) as test_hive_75 + ,cast(test_hive_86 as string) as test_hive_86 + ,cast(test_hive_76 as string) as test_hive_76 + ,cast(test_hive_89 as string) as test_hive_89 + ,cast(test_hive_88 as string) as test_hive_88 + ,cast(test_hive_91 as string) as test_hive_91 + ,cast(test_hive_71 as string) as test_hive_71 + ,cast(test_hive_72 as string) as test_hive_72 + ,cast(test_hive_73 as string) as test_hive_73 + ,cast(test_hive_80 as string) as test_hive_80 + ,cast(test_hive_103 as string) as test_hive_103 + ,cast(test_hive_104 as string) as test_hive_104 + ,cast(test_hive_1002 as string) as test_hive_1002 + ,cast(test_hive_1003 as string) as test_hive_1003 + ,cast(from_unixtime(unix_timestamp(test_hive_25,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_25 + ,cast(test_hive_28 as string) as test_hive_28 + ,cast(test_hive_93 as string) as test_hive_93 + ,cast(test_hive_94 as string) as test_hive_94 + ,cast(test_hive_95 as string) as test_hive_95 + ,cast(test_hive_99 as string) as test_hive_99 + ,cast(test_hive_105 as string) as test_hive_105 + ,cast(test_hive_83 as string) as test_hive_83 + ,cast(test_hive_84 as string) as test_hive_84 + ,cast(test_hive_100 as string) as 
test_hive_100 + ,cast(test_hive_1023 as int) as test_hive_1023 + ,cast(test_hive_1024 as int) as test_hive_1024 + ,cast(test_hive_1010 as int) as test_hive_1010 + ,cast(test_hive_1010_a_d as int) as test_hive_1010_a_d + ,cast(test_hive_1010_a_g as int) as test_hive_1010_a_g + ,cast(test_hive_1026 as double) as test_hive_1026 + ,cast(test_hive_1000 as double) as test_hive_1000 + ,cast(test_hive_1001 as double) as test_hive_1001 + ,cast(test_hive_1030 as int) as test_hive_1030 + ,cast(test_hive_1030_1 as int) as test_hive_1030_1 + ,cast(test_hive_1030_2 as int) as test_hive_1030_2 + ,cast(test_hive_1030_3 as int) as test_hive_1030_3 + ,cast(test_hive_1021 as double) as test_hive_1021 + ,cast(test_hive_1020 as double) as test_hive_1020 + ,cast(test_hive_1022 as int) as test_hive_1022 + ,cast(test_hive_1019 as int) as test_hive_1019 + ,cast(test_hive_1027 as double) as test_hive_1027 + ,cast(test_hive_1028 as double) as test_hive_1028 + ,cast(test_hive_1029 as double) as test_hive_1029 + ,cast(test_hive_1005 as int) as test_hive_1005 + ,cast(test_hive_1005_a_d as int) as test_hive_1005_a_d + ,cast(test_hive_1005_psr as int) as test_hive_1005_psr + ,cast(test_hive_1005_psr_a_d as int) as test_hive_1005_psr_a_d + ,cast(test_hive_1005_psr_e as int) as test_hive_1005_psr_e + ,cast(test_hive_1013 as int) as test_hive_1013 + ,cast(test_hive_1013_a_d as int) as test_hive_1013_a_d + ,cast(test_hive_1013_psr as int) as test_hive_1013_psr + ,cast(test_hive_1013_psr_a_d as int) as test_hive_1013_psr_a_d + ,cast(test_hive_1013_psr_e as int) as test_hive_1013_psr_e + ,cast(from_unixtime(unix_timestamp(test_hive_1034,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1034 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1038 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1038 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1040 +POSTHOOK: query: create view if not exists test_hive_1040 +as +select + cast(test_hive_1018 as int) as test_hive_1018 + ,cast(test_hive_1004 as int) as test_hive_1004 + ,cast(test_hive_1025 as int) as test_hive_1025 + ,cast(test_hive_23 as string) as test_hive_23 + ,cast(test_hive_27 as string) as test_hive_27 + ,cast(test_hive_29 as string) as test_hive_29 + ,cast(test_hive_30 as string) as test_hive_30 + ,cast(test_hive_97 as string) as test_hive_97 + ,cast(test_hive_96 as string) as test_hive_96 + ,cast(test_hive_98 as string) as test_hive_98 + ,cast(test_hive_101 as string) as test_hive_101 + ,cast(test_hive_102 as string) as test_hive_102 + ,cast(test_hive_109 as string) as test_hive_109 + ,cast(test_hive_111 as string) as test_hive_111 + ,cast(test_hive_112 as string) as test_hive_112 + ,cast(test_hive_113 as string) as test_hive_113 + ,cast(test_hive_114 as string) as test_hive_114 + ,cast(test_hive_115 as string) as test_hive_115 + ,cast(test_hive_78 as string) as test_hive_78 + ,cast(test_hive_79 as string) as test_hive_79 + ,cast(test_hive_24 as string) as test_hive_24 + ,cast(test_hive_26 as string) as test_hive_26 + ,cast(test_hive_110 as string) as test_hive_110 + ,cast(test_hive_77 as string) as test_hive_77 + ,cast(test_hive_87 as string) as test_hive_87 + ,cast(test_hive_92 as string) as test_hive_92 + ,cast(test_hive_90 as string) as test_hive_90 + ,cast(test_hive_74 as string) as test_hive_74 + ,cast(test_hive_85 as string) as test_hive_85 + ,cast(test_hive_81 as string) as test_hive_81 + ,cast(test_hive_82 
as string) as test_hive_82 + ,cast(test_hive_106 as string) as test_hive_106 + ,cast(test_hive_107 as string) as test_hive_107 + ,cast(test_hive_108 as string) as test_hive_108 + ,cast(test_hive_75 as string) as test_hive_75 + ,cast(test_hive_86 as string) as test_hive_86 + ,cast(test_hive_76 as string) as test_hive_76 + ,cast(test_hive_89 as string) as test_hive_89 + ,cast(test_hive_88 as string) as test_hive_88 + ,cast(test_hive_91 as string) as test_hive_91 + ,cast(test_hive_71 as string) as test_hive_71 + ,cast(test_hive_72 as string) as test_hive_72 + ,cast(test_hive_73 as string) as test_hive_73 + ,cast(test_hive_80 as string) as test_hive_80 + ,cast(test_hive_103 as string) as test_hive_103 + ,cast(test_hive_104 as string) as test_hive_104 + ,cast(test_hive_1002 as string) as test_hive_1002 + ,cast(test_hive_1003 as string) as test_hive_1003 + ,cast(from_unixtime(unix_timestamp(test_hive_25,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_25 + ,cast(test_hive_28 as string) as test_hive_28 + ,cast(test_hive_93 as string) as test_hive_93 + ,cast(test_hive_94 as string) as test_hive_94 + ,cast(test_hive_95 as string) as test_hive_95 + ,cast(test_hive_99 as string) as test_hive_99 + ,cast(test_hive_105 as string) as test_hive_105 + ,cast(test_hive_83 as string) as test_hive_83 + ,cast(test_hive_84 as string) as test_hive_84 + ,cast(test_hive_100 as string) as test_hive_100 + ,cast(test_hive_1023 as int) as test_hive_1023 + ,cast(test_hive_1024 as int) as test_hive_1024 + ,cast(test_hive_1010 as int) as test_hive_1010 + ,cast(test_hive_1010_a_d as int) as test_hive_1010_a_d + ,cast(test_hive_1010_a_g as int) as test_hive_1010_a_g + ,cast(test_hive_1026 as double) as test_hive_1026 + ,cast(test_hive_1000 as double) as test_hive_1000 + ,cast(test_hive_1001 as double) as test_hive_1001 + ,cast(test_hive_1030 as int) as test_hive_1030 + ,cast(test_hive_1030_1 as int) as test_hive_1030_1 + ,cast(test_hive_1030_2 as int) as test_hive_1030_2 + ,cast(test_hive_1030_3 as int) as test_hive_1030_3 + ,cast(test_hive_1021 as double) as test_hive_1021 + ,cast(test_hive_1020 as double) as test_hive_1020 + ,cast(test_hive_1022 as int) as test_hive_1022 + ,cast(test_hive_1019 as int) as test_hive_1019 + ,cast(test_hive_1027 as double) as test_hive_1027 + ,cast(test_hive_1028 as double) as test_hive_1028 + ,cast(test_hive_1029 as double) as test_hive_1029 + ,cast(test_hive_1005 as int) as test_hive_1005 + ,cast(test_hive_1005_a_d as int) as test_hive_1005_a_d + ,cast(test_hive_1005_psr as int) as test_hive_1005_psr + ,cast(test_hive_1005_psr_a_d as int) as test_hive_1005_psr_a_d + ,cast(test_hive_1005_psr_e as int) as test_hive_1005_psr_e + ,cast(test_hive_1013 as int) as test_hive_1013 + ,cast(test_hive_1013_a_d as int) as test_hive_1013_a_d + ,cast(test_hive_1013_psr as int) as test_hive_1013_psr + ,cast(test_hive_1013_psr_a_d as int) as test_hive_1013_psr_a_d + ,cast(test_hive_1013_psr_e as int) as test_hive_1013_psr_e + ,cast(from_unixtime(unix_timestamp(test_hive_1034,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1034 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1038 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1038 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1040 +POSTHOOK: Lineage: test_hive_1040.creation_date EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:creation_date, type:string, comment:null), ] 
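Note: the view above normalizes string-typed dates with from_unixtime(unix_timestamp(col, pattern), pattern). Both UDFs delegate to Java's SimpleDateFormat, whose patterns are case-sensitive: MM means month and mm means minutes, HH is the 24-hour clock and hh the 12-hour one, so the lowercase 'yyyymmdd' and 'yyyymmddhhmmss' patterns in this q-file parse the month digits as minutes. A sketch of the same conversion with the conventional casing, assuming the source columns really carry yyyyMMdd and yyyyMMddHHmmss strings:

    select
      cast(from_unixtime(unix_timestamp(test_hive_25,   'yyyyMMdd'),       'yyyy-MM-dd')          as timestamp) as test_hive_25
     ,cast(from_unixtime(unix_timestamp(test_hive_1034, 'yyyyMMddHHmmss'), 'yyyy-MM-dd HH:mm:ss') as timestamp) as test_hive_1034
    from test_hive_1038;

Either way the golden output records the patterns exactly as written in the q-file, and the lineage entries below are unaffected: both columns remain EXPRESSION dependencies.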
+POSTHOOK: Lineage: test_hive_1040.ds EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.ds_ts SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.source_file_name SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_100 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_100, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1000 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1000, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1001 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1001, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1002 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1002, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1003 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1003, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1004 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1004, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1005 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1005, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1005_a_d EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1005_a_d, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1005_psr EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1005_psr, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1005_psr_a_d EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1005_psr_a_d, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1005_psr_e EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1005_psr_e, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_101 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_101, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1010 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1010, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1010_a_d EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1010_a_d, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1010_a_g EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1010_a_g, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1013 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1013, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1013_a_d EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1013_a_d, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1013_psr EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1013_psr, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1013_psr_a_d EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1013_psr_a_d, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1013_psr_e EXPRESSION 
[(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1013_psr_e, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1018 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1018, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1019 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1019, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_102 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_102, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1020 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1020, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1021 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1021, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1022 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1022, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1023 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1023, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1024 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1024, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1025 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1025, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1026 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1026, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1027 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1027, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1028 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1028, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1029 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1029, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_103 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_103, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1030 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1030, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1030_1 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1030_1, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1030_2 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1030_2, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1030_3 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1030_3, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_1034 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1034, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_104 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_104, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_105 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_105, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_106 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_106, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1040.test_hive_107 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_107, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_108 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_108, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_109 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_109, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_110 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_110, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_111 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_111, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_112 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_112, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_113 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_113, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_114 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_114, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_115 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_115, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_23 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_23, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_24 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_24, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_25 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_25, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_26 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_26, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_27 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_27, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_28 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_28, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_29 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_29, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_30 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_30, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_71 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_71, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_72 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_72, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_73 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_73, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_74 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_74, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_75 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_75, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_76 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_76, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_77 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_77, type:string, comment:null), ] 
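Note: in these lineage records, SIMPLE marks an output column that is a direct copy of a source column, while EXPRESSION marks one produced by a computation. In this view the identity casts (a string column cast back to string) collapse to SIMPLE, and the type-changing casts (to int, double, or timestamp) surface as EXPRESSION. A minimal pair, using a hypothetical table t(a string, b string):

    create view v as
    select
      a                    -- recorded as v.a SIMPLE: direct copy of t.a
     ,cast(b as int) as b  -- recorded as v.b EXPRESSION: derived from t.b
    from t;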
+POSTHOOK: Lineage: test_hive_1040.test_hive_78 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_78, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_79 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_79, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_80 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_80, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_81 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_81, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_82 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_82, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_83 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_83, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_84 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_84, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_85 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_85, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_86 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_86, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_87 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_87, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_88 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_88, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_89 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_89, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_90 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_90, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_91 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_91, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_92 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_92, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_93 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_93, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_94 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_94, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_95 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_95, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_96 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_96, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_97 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_97, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_98 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_98, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.test_hive_99 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_99, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1040.ts EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1039 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1039 +POSTHOOK: type: DROPVIEW +PREHOOK: 
query: create view test_hive_1039 +as +select + test_hive_1018 as test_hive_1018 + ,test_hive_1004 as test_hive_1004 + ,test_hive_1025 as test_hive_1025 + ,test_hive_23 as test_hive_23 + ,test_hive_27 as test_hive_27 + ,test_hive_29 as test_hive_29 + ,test_hive_30 as test_hive_30 + ,test_hive_97 as test_hive_97 + ,test_hive_96 as test_hive_96 + ,test_hive_98 as test_hive_98 + ,test_hive_101 as test_hive_101 + ,test_hive_102 as test_hive_102 + ,test_hive_109 as test_hive_109 + ,test_hive_111 as test_hive_111 + ,test_hive_112 as test_hive_112 + ,test_hive_113 as test_hive_113 + ,test_hive_114 as test_hive_114 + ,test_hive_115 as test_hive_115 + ,test_hive_78 as test_hive_78 + ,test_hive_79 as test_hive_79 + ,test_hive_24 as test_hive_24 + ,test_hive_26 as test_hive_26 + ,test_hive_110 as test_hive_110 + ,test_hive_77 as test_hive_77 + ,test_hive_87 as test_hive_87 + ,test_hive_92 as test_hive_92 + ,test_hive_90 as test_hive_90 + ,test_hive_74 as test_hive_74 + ,test_hive_85 as test_hive_85 + ,test_hive_81 as test_hive_81 + ,test_hive_82 as test_hive_82 + ,test_hive_106 as test_hive_106 + ,test_hive_107 as test_hive_107 + ,test_hive_108 as test_hive_108 + ,test_hive_75 as test_hive_75 + ,test_hive_86 as test_hive_86 + ,test_hive_76 as test_hive_76 + ,test_hive_89 as test_hive_89 + ,test_hive_88 as test_hive_88 + ,test_hive_91 as test_hive_91 + ,test_hive_71 as test_hive_71 + ,test_hive_72 as test_hive_72 + ,test_hive_73 as test_hive_73 + ,test_hive_80 as test_hive_80 + ,test_hive_103 as test_hive_103 + ,test_hive_104 as test_hive_104 + ,test_hive_1002 as test_hive_1002 + ,test_hive_1003 as test_hive_1003 + ,test_hive_25 as test_hive_25 + ,test_hive_28 as test_hive_28 + ,test_hive_93 as test_hive_93 + ,test_hive_94 as test_hive_94 + ,test_hive_95 as test_hive_95 + ,test_hive_99 as test_hive_99 + ,test_hive_105 as test_hive_105 + ,test_hive_83 as test_hive_83 + ,test_hive_84 as test_hive_84 + ,test_hive_100 as test_hive_100 + ,test_hive_1023 as test_hive_1023 + ,test_hive_1024 as test_hive_1024 + ,test_hive_1010 as test_hive_1010 + ,test_hive_1010_a_d as test_hive_1010_a_d + ,test_hive_1010_a_g as test_hive_1010_a_g + ,test_hive_1026 as test_hive_1026 + ,test_hive_1000 as test_hive_1000 + ,test_hive_1001 as test_hive_1001 + ,test_hive_1030 as test_hive_1030 + ,test_hive_1030_1 as test_hive_1030_1 + ,test_hive_1030_2 as test_hive_1030_2 + ,test_hive_1030_3 as test_hive_1030_3 + ,test_hive_1021 as test_hive_1021 + ,test_hive_1020 as test_hive_1020 + ,test_hive_1022 as test_hive_1022 + ,test_hive_1019 as test_hive_1019 + ,test_hive_1027 as test_hive_1027 + ,test_hive_1028 as test_hive_1028 + ,test_hive_1029 as test_hive_1029 + ,test_hive_1005 as test_hive_1005 + ,test_hive_1005_a_d as test_hive_1005_a_d + ,test_hive_1005_psr as test_hive_1005_psr + ,test_hive_1005_psr_a_d as test_hive_1005_psr_a_d + ,test_hive_1005_psr_e as test_hive_1005_psr_e + ,test_hive_1013 as test_hive_1013 + ,test_hive_1013_a_d as test_hive_1013_a_d + ,test_hive_1013_psr as test_hive_1013_psr + ,test_hive_1013_psr_a_d as test_hive_1013_psr_a_d + ,test_hive_1013_psr_e as test_hive_1013_psr_e + ,test_hive_1034 as test_hive_1034 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1040 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1038 +PREHOOK: Input: default@test_hive_1040 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1039 +POSTHOOK: query: create view test_hive_1039 +as +select + test_hive_1018 as test_hive_1018 + ,test_hive_1004 as test_hive_1004 + 
,test_hive_1025 as test_hive_1025 + ,test_hive_23 as test_hive_23 + ,test_hive_27 as test_hive_27 + ,test_hive_29 as test_hive_29 + ,test_hive_30 as test_hive_30 + ,test_hive_97 as test_hive_97 + ,test_hive_96 as test_hive_96 + ,test_hive_98 as test_hive_98 + ,test_hive_101 as test_hive_101 + ,test_hive_102 as test_hive_102 + ,test_hive_109 as test_hive_109 + ,test_hive_111 as test_hive_111 + ,test_hive_112 as test_hive_112 + ,test_hive_113 as test_hive_113 + ,test_hive_114 as test_hive_114 + ,test_hive_115 as test_hive_115 + ,test_hive_78 as test_hive_78 + ,test_hive_79 as test_hive_79 + ,test_hive_24 as test_hive_24 + ,test_hive_26 as test_hive_26 + ,test_hive_110 as test_hive_110 + ,test_hive_77 as test_hive_77 + ,test_hive_87 as test_hive_87 + ,test_hive_92 as test_hive_92 + ,test_hive_90 as test_hive_90 + ,test_hive_74 as test_hive_74 + ,test_hive_85 as test_hive_85 + ,test_hive_81 as test_hive_81 + ,test_hive_82 as test_hive_82 + ,test_hive_106 as test_hive_106 + ,test_hive_107 as test_hive_107 + ,test_hive_108 as test_hive_108 + ,test_hive_75 as test_hive_75 + ,test_hive_86 as test_hive_86 + ,test_hive_76 as test_hive_76 + ,test_hive_89 as test_hive_89 + ,test_hive_88 as test_hive_88 + ,test_hive_91 as test_hive_91 + ,test_hive_71 as test_hive_71 + ,test_hive_72 as test_hive_72 + ,test_hive_73 as test_hive_73 + ,test_hive_80 as test_hive_80 + ,test_hive_103 as test_hive_103 + ,test_hive_104 as test_hive_104 + ,test_hive_1002 as test_hive_1002 + ,test_hive_1003 as test_hive_1003 + ,test_hive_25 as test_hive_25 + ,test_hive_28 as test_hive_28 + ,test_hive_93 as test_hive_93 + ,test_hive_94 as test_hive_94 + ,test_hive_95 as test_hive_95 + ,test_hive_99 as test_hive_99 + ,test_hive_105 as test_hive_105 + ,test_hive_83 as test_hive_83 + ,test_hive_84 as test_hive_84 + ,test_hive_100 as test_hive_100 + ,test_hive_1023 as test_hive_1023 + ,test_hive_1024 as test_hive_1024 + ,test_hive_1010 as test_hive_1010 + ,test_hive_1010_a_d as test_hive_1010_a_d + ,test_hive_1010_a_g as test_hive_1010_a_g + ,test_hive_1026 as test_hive_1026 + ,test_hive_1000 as test_hive_1000 + ,test_hive_1001 as test_hive_1001 + ,test_hive_1030 as test_hive_1030 + ,test_hive_1030_1 as test_hive_1030_1 + ,test_hive_1030_2 as test_hive_1030_2 + ,test_hive_1030_3 as test_hive_1030_3 + ,test_hive_1021 as test_hive_1021 + ,test_hive_1020 as test_hive_1020 + ,test_hive_1022 as test_hive_1022 + ,test_hive_1019 as test_hive_1019 + ,test_hive_1027 as test_hive_1027 + ,test_hive_1028 as test_hive_1028 + ,test_hive_1029 as test_hive_1029 + ,test_hive_1005 as test_hive_1005 + ,test_hive_1005_a_d as test_hive_1005_a_d + ,test_hive_1005_psr as test_hive_1005_psr + ,test_hive_1005_psr_a_d as test_hive_1005_psr_a_d + ,test_hive_1005_psr_e as test_hive_1005_psr_e + ,test_hive_1013 as test_hive_1013 + ,test_hive_1013_a_d as test_hive_1013_a_d + ,test_hive_1013_psr as test_hive_1013_psr + ,test_hive_1013_psr_a_d as test_hive_1013_psr_a_d + ,test_hive_1013_psr_e as test_hive_1013_psr_e + ,test_hive_1034 as test_hive_1034 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1040 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1038 +POSTHOOK: Input: default@test_hive_1040 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1039 +POSTHOOK: Lineage: test_hive_1039.creation_date EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.ds EXPRESSION 
[(test_hive_1038)test_hive_1038.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.ds_ts SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.source_file_name SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_100 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_100, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1000 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1000, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1001 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1001, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1002 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1002, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1003 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1003, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1004 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1004, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1005 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1005, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1005_a_d EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1005_a_d, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1005_psr EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1005_psr, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1005_psr_a_d EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1005_psr_a_d, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1005_psr_e EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1005_psr_e, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_101 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_101, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1010 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1010, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1010_a_d EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1010_a_d, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1010_a_g EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1010_a_g, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1013 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1013, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1013_a_d EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1013_a_d, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1013_psr EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1013_psr, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1013_psr_a_d EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1013_psr_a_d, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1013_psr_e EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1013_psr_e, 
type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1018 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1018, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1019 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1019, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_102 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_102, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1020 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1020, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1021 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1021, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1022 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1022, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1023 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1023, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1024 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1024, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1025 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1025, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1026 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1026, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1027 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1027, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1028 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1028, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1029 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1029, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_103 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_103, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1030 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1030, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1030_1 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1030_1, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1030_2 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1030_2, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1030_3 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1030_3, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_1034 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1034, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_104 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_104, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_105 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_105, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_106 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_106, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_107 SIMPLE 
[(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_107, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_108 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_108, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_109 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_109, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_110 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_110, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_111 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_111, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_112 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_112, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_113 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_113, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_114 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_114, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_115 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_115, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_23 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_23, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_24 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_24, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_25 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_25, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_26 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_26, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_27 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_27, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_28 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_28, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_29 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_29, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_30 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_30, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_71 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_71, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_72 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_72, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_73 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_73, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_74 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_74, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_75 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_75, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_76 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_76, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_77 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_77, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1039.test_hive_78 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_78, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_79 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_79, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_80 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_80, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_81 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_81, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_82 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_82, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_83 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_83, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_84 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_84, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_85 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_85, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_86 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_86, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_87 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_87, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_88 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_88, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_89 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_89, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_90 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_90, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_91 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_91, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_92 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_92, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_93 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_93, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_94 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_94, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_95 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_95, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_96 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_96, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_97 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_97, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_98 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_98, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.test_hive_99 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_99, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1039.ts EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1036 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1036 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view 
test_hive_1036 +as +select t1.* +from test_hive_1039 t1 +inner join test_hive_1037 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1037 +PREHOOK: Input: default@test_hive_1038 +PREHOOK: Input: default@test_hive_1039 +PREHOOK: Input: default@test_hive_1040 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1036 +POSTHOOK: query: create view test_hive_1036 +as +select t1.* +from test_hive_1039 t1 +inner join test_hive_1037 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1037 +POSTHOOK: Input: default@test_hive_1038 +POSTHOOK: Input: default@test_hive_1039 +POSTHOOK: Input: default@test_hive_1040 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1036 +POSTHOOK: Lineage: test_hive_1036.creation_date EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.ds EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.ds_ts SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.source_file_name SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_100 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_100, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1000 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1000, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1001 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1001, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1002 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1002, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1003 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1003, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1004 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1004, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1005 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1005, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1005_a_d EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1005_a_d, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1005_psr EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1005_psr, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1005_psr_a_d EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1005_psr_a_d, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1005_psr_e EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1005_psr_e, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_101 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_101, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1010 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1010, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1010_a_d EXPRESSION 
[(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1010_a_d, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1010_a_g EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1010_a_g, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1013 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1013, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1013_a_d EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1013_a_d, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1013_psr EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1013_psr, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1013_psr_a_d EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1013_psr_a_d, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1013_psr_e EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1013_psr_e, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1018 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1018, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1019 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1019, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_102 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_102, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1020 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1020, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1021 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1021, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1022 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1022, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1023 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1023, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1024 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1024, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1025 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1025, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1026 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1026, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1027 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1027, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1028 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1028, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1029 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1029, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_103 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_103, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1030 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1030, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1030_1 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1030_1, 
type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1030_2 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1030_2, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1030_3 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1030_3, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_1034 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_1034, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_104 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_104, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_105 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_105, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_106 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_106, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_107 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_107, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_108 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_108, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_109 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_109, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_110 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_110, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_111 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_111, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_112 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_112, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_113 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_113, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_114 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_114, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_115 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_115, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_23 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_23, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_24 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_24, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_25 EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_25, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_26 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_26, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_27 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_27, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_28 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_28, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_29 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_29, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_30 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_30, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_71 SIMPLE 
[(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_71, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_72 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_72, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_73 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_73, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_74 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_74, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_75 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_75, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_76 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_76, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_77 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_77, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_78 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_78, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_79 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_79, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_80 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_80, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_81 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_81, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_82 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_82, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_83 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_83, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_84 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_84, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_85 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_85, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_86 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_86, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_87 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_87, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_88 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_88, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_89 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_89, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_90 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_90, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_91 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_91, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_92 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_92, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_93 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_93, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_94 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_94, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_95 SIMPLE 
[(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_95, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_96 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_96, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_97 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_97, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_98 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_98, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.test_hive_99 SIMPLE [(test_hive_1038)test_hive_1038.FieldSchema(name:test_hive_99, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1036.ts EXPRESSION [(test_hive_1038)test_hive_1038.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1054 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1054 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1054 +( + test_hive_1047 string + ,test_hive_1045 string + ,test_hive_1048 string + ,test_hive_132 string + ,test_hive_146 string + ,test_hive_1043 string + ,test_hive_149 string + ,test_hive_150 string + ,test_hive_119 string + ,test_hive_118 string + ,test_hive_120 string + ,test_hive_151 string + ,test_hive_116 string + ,test_hive_117 string + ,test_hive_121 string + ,test_hive_122 string + ,test_hive_152 string + ,test_hive_155 string + ,test_hive_159 string + ,test_hive_131 string + ,test_hive_140 string + ,test_hive_145 string + ,test_hive_143 string + ,test_hive_128 string + ,test_hive_138 string + ,test_hive_134 string + ,test_hive_135 string + ,test_hive_156 string + ,test_hive_157 string + ,test_hive_158 string + ,test_hive_129 string + ,test_hive_139 string + ,test_hive_130 string + ,test_hive_142 string + ,test_hive_141 string + ,test_hive_144 string + ,test_hive_125 string + ,test_hive_126 string + ,test_hive_127 string + ,test_hive_133 string + ,test_hive_154 string + ,test_hive_123 string + ,test_hive_160 string + ,test_hive_136 string + ,test_hive_137 string + ,test_hive_124 string + ,test_hive_153 string + ,test_hive_148 string + ,test_hive_147 string + ,test_hive_1052 string + ,test_hive_1051 string + ,test_hive_1041 string + ,test_hive_1042 string + ,test_hive_1044 string + ,test_hive_1046 string + ,test_hive_1050 string + ,test_hive_1049 string + ,test_hive_1053 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1054 +POSTHOOK: query: create table test_hive_1054 +( + test_hive_1047 string + ,test_hive_1045 string + ,test_hive_1048 string + ,test_hive_132 string + ,test_hive_146 string + ,test_hive_1043 string + ,test_hive_149 string + ,test_hive_150 string + ,test_hive_119 string + ,test_hive_118 string + ,test_hive_120 string + ,test_hive_151 string + ,test_hive_116 string + ,test_hive_117 string + ,test_hive_121 string + ,test_hive_122 string + ,test_hive_152 string + ,test_hive_155 string + ,test_hive_159 string + ,test_hive_131 string + ,test_hive_140 string + ,test_hive_145 string + ,test_hive_143 string + ,test_hive_128 string + ,test_hive_138 string + ,test_hive_134 string + ,test_hive_135 string + ,test_hive_156 string + ,test_hive_157 string + ,test_hive_158 string + ,test_hive_129 string + ,test_hive_139 string + ,test_hive_130 string + ,test_hive_142 string + 
,test_hive_141 string + ,test_hive_144 string + ,test_hive_125 string + ,test_hive_126 string + ,test_hive_127 string + ,test_hive_133 string + ,test_hive_154 string + ,test_hive_123 string + ,test_hive_160 string + ,test_hive_136 string + ,test_hive_137 string + ,test_hive_124 string + ,test_hive_153 string + ,test_hive_148 string + ,test_hive_147 string + ,test_hive_1052 string + ,test_hive_1051 string + ,test_hive_1041 string + ,test_hive_1042 string + ,test_hive_1044 string + ,test_hive_1046 string + ,test_hive_1050 string + ,test_hive_1049 string + ,test_hive_1053 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1054 +PREHOOK: query: create table if not exists test_hive_1057 +( + test_hive_1047 string + ,test_hive_1045 string + ,test_hive_1048 string + ,test_hive_132 string + ,test_hive_146 string + ,test_hive_1043 string + ,test_hive_149 string + ,test_hive_150 string + ,test_hive_119 string + ,test_hive_118 string + ,test_hive_120 string + ,test_hive_151 string + ,test_hive_116 string + ,test_hive_117 string + ,test_hive_121 string + ,test_hive_122 string + ,test_hive_152 string + ,test_hive_155 string + ,test_hive_159 string + ,test_hive_131 string + ,test_hive_140 string + ,test_hive_145 string + ,test_hive_143 string + ,test_hive_128 string + ,test_hive_138 string + ,test_hive_134 string + ,test_hive_135 string + ,test_hive_156 string + ,test_hive_157 string + ,test_hive_158 string + ,test_hive_129 string + ,test_hive_139 string + ,test_hive_130 string + ,test_hive_142 string + ,test_hive_141 string + ,test_hive_144 string + ,test_hive_125 string + ,test_hive_126 string + ,test_hive_127 string + ,test_hive_133 string + ,test_hive_154 string + ,test_hive_123 string + ,test_hive_160 string + ,test_hive_136 string + ,test_hive_137 string + ,test_hive_124 string + ,test_hive_153 string + ,test_hive_148 string + ,test_hive_147 string + ,test_hive_1052 string + ,test_hive_1051 string + ,test_hive_1041 string + ,test_hive_1042 string + ,test_hive_1044 string + ,test_hive_1046 string + ,test_hive_1050 string + ,test_hive_1049 string + ,test_hive_1053 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1057 +POSTHOOK: query: create table if not exists test_hive_1057 +( + test_hive_1047 string + ,test_hive_1045 string + ,test_hive_1048 string + ,test_hive_132 string + ,test_hive_146 string + ,test_hive_1043 string + ,test_hive_149 string + ,test_hive_150 string + ,test_hive_119 string + ,test_hive_118 string + ,test_hive_120 string + ,test_hive_151 string + ,test_hive_116 string + ,test_hive_117 string + ,test_hive_121 string + ,test_hive_122 string + ,test_hive_152 string + ,test_hive_155 string + ,test_hive_159 string + ,test_hive_131 string + ,test_hive_140 string + ,test_hive_145 string + ,test_hive_143 string + ,test_hive_128 string + ,test_hive_138 string + ,test_hive_134 string + ,test_hive_135 string + ,test_hive_156 string + ,test_hive_157 string + ,test_hive_158 string + ,test_hive_129 string + ,test_hive_139 string + ,test_hive_130 string + ,test_hive_142 string + ,test_hive_141 string + ,test_hive_144 string + ,test_hive_125 string + ,test_hive_126 string + ,test_hive_127 string + ,test_hive_133 string + 
,test_hive_154 string + ,test_hive_123 string + ,test_hive_160 string + ,test_hive_136 string + ,test_hive_137 string + ,test_hive_124 string + ,test_hive_153 string + ,test_hive_148 string + ,test_hive_147 string + ,test_hive_1052 string + ,test_hive_1051 string + ,test_hive_1041 string + ,test_hive_1042 string + ,test_hive_1044 string + ,test_hive_1046 string + ,test_hive_1050 string + ,test_hive_1049 string + ,test_hive_1053 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1057 +PREHOOK: query: drop table if exists test_hive_1056 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1056 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1056 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1056 +POSTHOOK: query: create table if not exists test_hive_1056 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1056 +PREHOOK: query: drop view if exists test_hive_1059 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1059 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1059 +as +select + cast(test_hive_1047 as int) as test_hive_1047 + ,cast(test_hive_1045 as int) as test_hive_1045 + ,cast(test_hive_1048 as int) as test_hive_1048 + ,cast(test_hive_132 as string) as test_hive_132 + ,cast(test_hive_146 as string) as test_hive_146 + ,cast(test_hive_1043 as string) as test_hive_1043 + ,cast(test_hive_149 as string) as test_hive_149 + ,cast(test_hive_150 as string) as test_hive_150 + ,cast(from_unixtime(unix_timestamp(test_hive_119,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_119 + ,cast(test_hive_118 as string) as test_hive_118 + ,cast(test_hive_120 as string) as test_hive_120 + ,cast(test_hive_151 as string) as test_hive_151 + ,cast(test_hive_116 as string) as test_hive_116 + ,cast(test_hive_117 as string) as test_hive_117 + ,cast(test_hive_121 as string) as test_hive_121 + ,cast(test_hive_122 as string) as test_hive_122 + ,cast(test_hive_152 as string) as test_hive_152 + ,cast(test_hive_155 as string) as test_hive_155 + ,cast(test_hive_159 as string) as test_hive_159 + ,cast(test_hive_131 as string) as test_hive_131 + ,cast(test_hive_140 as string) as test_hive_140 + ,cast(test_hive_145 as string) as test_hive_145 + ,cast(test_hive_143 as string) as test_hive_143 + ,cast(test_hive_128 as string) as test_hive_128 + ,cast(test_hive_138 as string) as test_hive_138 + ,cast(test_hive_134 as string) as test_hive_134 + ,cast(test_hive_135 as string) as test_hive_135 + ,cast(test_hive_156 as string) as test_hive_156 + ,cast(test_hive_157 as string) as test_hive_157 + ,cast(test_hive_158 as string) as test_hive_158 + ,cast(test_hive_129 as string) as test_hive_129 + ,cast(test_hive_139 as string) as test_hive_139 + ,cast(test_hive_130 as string) as test_hive_130 + ,cast(test_hive_142 as string) as test_hive_142 + ,cast(test_hive_141 as string) as test_hive_141 + ,cast(test_hive_144 as string) as test_hive_144 + ,cast(test_hive_125 as string) as test_hive_125 + ,cast(test_hive_126 as string) as test_hive_126 + ,cast(test_hive_127 as string) as test_hive_127 + ,cast(test_hive_133 as string) as test_hive_133 + ,cast(test_hive_154 as string) as 
test_hive_154 + ,cast(test_hive_123 as string) as test_hive_123 + ,cast(test_hive_160 as string) as test_hive_160 + ,cast(test_hive_136 as string) as test_hive_136 + ,cast(test_hive_137 as string) as test_hive_137 + ,cast(test_hive_124 as string) as test_hive_124 + ,cast(test_hive_153 as string) as test_hive_153 + ,cast(test_hive_148 as string) as test_hive_148 + ,cast(test_hive_147 as string) as test_hive_147 + ,cast(test_hive_1052 as int) as test_hive_1052 + ,cast(test_hive_1051 as int) as test_hive_1051 + ,cast(test_hive_1041 as int) as test_hive_1041 + ,cast(test_hive_1042 as int) as test_hive_1042 + ,cast(test_hive_1044 as int) as test_hive_1044 + ,cast(test_hive_1046 as int) as test_hive_1046 + ,cast(test_hive_1050 as int) as test_hive_1050 + ,cast(test_hive_1049 as int) as test_hive_1049 + ,cast(from_unixtime(unix_timestamp(test_hive_1053,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1053 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1057 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1057 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1059 +POSTHOOK: query: create view if not exists test_hive_1059 +as +select + cast(test_hive_1047 as int) as test_hive_1047 + ,cast(test_hive_1045 as int) as test_hive_1045 + ,cast(test_hive_1048 as int) as test_hive_1048 + ,cast(test_hive_132 as string) as test_hive_132 + ,cast(test_hive_146 as string) as test_hive_146 + ,cast(test_hive_1043 as string) as test_hive_1043 + ,cast(test_hive_149 as string) as test_hive_149 + ,cast(test_hive_150 as string) as test_hive_150 + ,cast(from_unixtime(unix_timestamp(test_hive_119,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_119 + ,cast(test_hive_118 as string) as test_hive_118 + ,cast(test_hive_120 as string) as test_hive_120 + ,cast(test_hive_151 as string) as test_hive_151 + ,cast(test_hive_116 as string) as test_hive_116 + ,cast(test_hive_117 as string) as test_hive_117 + ,cast(test_hive_121 as string) as test_hive_121 + ,cast(test_hive_122 as string) as test_hive_122 + ,cast(test_hive_152 as string) as test_hive_152 + ,cast(test_hive_155 as string) as test_hive_155 + ,cast(test_hive_159 as string) as test_hive_159 + ,cast(test_hive_131 as string) as test_hive_131 + ,cast(test_hive_140 as string) as test_hive_140 + ,cast(test_hive_145 as string) as test_hive_145 + ,cast(test_hive_143 as string) as test_hive_143 + ,cast(test_hive_128 as string) as test_hive_128 + ,cast(test_hive_138 as string) as test_hive_138 + ,cast(test_hive_134 as string) as test_hive_134 + ,cast(test_hive_135 as string) as test_hive_135 + ,cast(test_hive_156 as string) as test_hive_156 + ,cast(test_hive_157 as string) as test_hive_157 + ,cast(test_hive_158 as string) as test_hive_158 + ,cast(test_hive_129 as string) as test_hive_129 + ,cast(test_hive_139 as string) as test_hive_139 + ,cast(test_hive_130 as string) as test_hive_130 + ,cast(test_hive_142 as string) as test_hive_142 + ,cast(test_hive_141 as string) as test_hive_141 + ,cast(test_hive_144 as string) as test_hive_144 + ,cast(test_hive_125 as string) as test_hive_125 + ,cast(test_hive_126 as string) as test_hive_126 + ,cast(test_hive_127 as string) as test_hive_127 + ,cast(test_hive_133 as string) as test_hive_133 + ,cast(test_hive_154 as string) as test_hive_154 + ,cast(test_hive_123 as string) as test_hive_123 + ,cast(test_hive_160 as string) as test_hive_160 + ,cast(test_hive_136 as string) as 
test_hive_136 + ,cast(test_hive_137 as string) as test_hive_137 + ,cast(test_hive_124 as string) as test_hive_124 + ,cast(test_hive_153 as string) as test_hive_153 + ,cast(test_hive_148 as string) as test_hive_148 + ,cast(test_hive_147 as string) as test_hive_147 + ,cast(test_hive_1052 as int) as test_hive_1052 + ,cast(test_hive_1051 as int) as test_hive_1051 + ,cast(test_hive_1041 as int) as test_hive_1041 + ,cast(test_hive_1042 as int) as test_hive_1042 + ,cast(test_hive_1044 as int) as test_hive_1044 + ,cast(test_hive_1046 as int) as test_hive_1046 + ,cast(test_hive_1050 as int) as test_hive_1050 + ,cast(test_hive_1049 as int) as test_hive_1049 + ,cast(from_unixtime(unix_timestamp(test_hive_1053,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1053 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1057 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1057 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1059 +POSTHOOK: Lineage: test_hive_1059.creation_date EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.ds EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.ds_ts SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.source_file_name SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_1041 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1041, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_1042 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1042, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_1043 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1043, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_1044 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1044, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_1045 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1045, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_1046 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1046, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_1047 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1047, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_1048 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1048, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_1049 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1049, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_1050 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1050, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_1051 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1051, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_1052 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1052, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1059.test_hive_1053 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1053, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_116 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_116, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_117 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_117, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_118 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_118, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_119 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_119, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_120 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_120, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_121 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_121, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_122 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_122, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_123 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_123, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_124 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_124, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_125 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_125, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_126 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_126, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_127 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_127, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_128 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_128, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_129 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_129, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_130 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_130, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_131 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_131, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_132 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_132, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_133 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_133, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_134 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_134, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_135 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_135, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_136 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_136, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_137 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_137, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_138 SIMPLE 
[(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_138, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_139 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_139, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_140 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_140, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_141 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_141, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_142 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_142, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_143 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_143, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_144 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_144, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_145 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_145, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_146 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_146, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_147 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_147, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_148 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_148, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_149 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_149, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_150 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_150, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_151 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_151, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_152 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_152, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_153 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_153, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_154 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_154, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_155 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_155, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_156 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_156, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_157 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_157, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_158 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_158, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_159 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_159, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.test_hive_160 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_160, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1059.ts EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if 
exists test_hive_1058 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1058 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1058 +as +select + test_hive_1047 as test_hive_1047 + ,test_hive_1045 as test_hive_1045 + ,test_hive_1048 as test_hive_1048 + ,test_hive_132 as test_hive_132 + ,test_hive_146 as test_hive_146 + ,test_hive_1043 as test_hive_1043 + ,test_hive_149 as test_hive_149 + ,test_hive_150 as test_hive_150 + ,test_hive_119 as test_hive_119 + ,test_hive_118 as test_hive_118 + ,test_hive_120 as test_hive_120 + ,test_hive_151 as test_hive_151 + ,test_hive_116 as test_hive_116 + ,test_hive_117 as test_hive_117 + ,test_hive_121 as test_hive_121 + ,test_hive_122 as test_hive_122 + ,test_hive_152 as test_hive_152 + ,test_hive_155 as test_hive_155 + ,test_hive_159 as test_hive_159 + ,test_hive_131 as test_hive_131 + ,test_hive_140 as test_hive_140 + ,test_hive_145 as test_hive_145 + ,test_hive_143 as test_hive_143 + ,test_hive_128 as test_hive_128 + ,test_hive_138 as test_hive_138 + ,test_hive_134 as test_hive_134 + ,test_hive_135 as test_hive_135 + ,test_hive_156 as test_hive_156 + ,test_hive_157 as test_hive_157 + ,test_hive_158 as test_hive_158 + ,test_hive_129 as test_hive_129 + ,test_hive_139 as test_hive_139 + ,test_hive_130 as test_hive_130 + ,test_hive_142 as test_hive_142 + ,test_hive_141 as test_hive_141 + ,test_hive_144 as test_hive_144 + ,test_hive_125 as test_hive_125 + ,test_hive_126 as test_hive_126 + ,test_hive_127 as test_hive_127 + ,test_hive_133 as test_hive_133 + ,test_hive_154 as test_hive_154 + ,test_hive_123 as test_hive_123 + ,test_hive_160 as test_hive_160 + ,test_hive_136 as test_hive_136 + ,test_hive_137 as test_hive_137 + ,test_hive_124 as test_hive_124 + ,test_hive_153 as test_hive_153 + ,test_hive_148 as test_hive_148 + ,test_hive_147 as test_hive_147 + ,test_hive_1052 as test_hive_1052 + ,test_hive_1051 as test_hive_1051 + ,test_hive_1041 as test_hive_1041 + ,test_hive_1042 as test_hive_1042 + ,test_hive_1044 as test_hive_1044 + ,test_hive_1046 as test_hive_1046 + ,test_hive_1050 as test_hive_1050 + ,test_hive_1049 as test_hive_1049 + ,test_hive_1053 as test_hive_1053 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1059 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1057 +PREHOOK: Input: default@test_hive_1059 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1058 +POSTHOOK: query: create view test_hive_1058 +as +select + test_hive_1047 as test_hive_1047 + ,test_hive_1045 as test_hive_1045 + ,test_hive_1048 as test_hive_1048 + ,test_hive_132 as test_hive_132 + ,test_hive_146 as test_hive_146 + ,test_hive_1043 as test_hive_1043 + ,test_hive_149 as test_hive_149 + ,test_hive_150 as test_hive_150 + ,test_hive_119 as test_hive_119 + ,test_hive_118 as test_hive_118 + ,test_hive_120 as test_hive_120 + ,test_hive_151 as test_hive_151 + ,test_hive_116 as test_hive_116 + ,test_hive_117 as test_hive_117 + ,test_hive_121 as test_hive_121 + ,test_hive_122 as test_hive_122 + ,test_hive_152 as test_hive_152 + ,test_hive_155 as test_hive_155 + ,test_hive_159 as test_hive_159 + ,test_hive_131 as test_hive_131 + ,test_hive_140 as test_hive_140 + ,test_hive_145 as test_hive_145 + ,test_hive_143 as test_hive_143 + ,test_hive_128 as test_hive_128 + ,test_hive_138 as test_hive_138 + ,test_hive_134 as test_hive_134 + ,test_hive_135 as test_hive_135 + ,test_hive_156 as test_hive_156 + ,test_hive_157 as test_hive_157 + ,test_hive_158 as test_hive_158 + ,test_hive_129 as 
test_hive_129 + ,test_hive_139 as test_hive_139 + ,test_hive_130 as test_hive_130 + ,test_hive_142 as test_hive_142 + ,test_hive_141 as test_hive_141 + ,test_hive_144 as test_hive_144 + ,test_hive_125 as test_hive_125 + ,test_hive_126 as test_hive_126 + ,test_hive_127 as test_hive_127 + ,test_hive_133 as test_hive_133 + ,test_hive_154 as test_hive_154 + ,test_hive_123 as test_hive_123 + ,test_hive_160 as test_hive_160 + ,test_hive_136 as test_hive_136 + ,test_hive_137 as test_hive_137 + ,test_hive_124 as test_hive_124 + ,test_hive_153 as test_hive_153 + ,test_hive_148 as test_hive_148 + ,test_hive_147 as test_hive_147 + ,test_hive_1052 as test_hive_1052 + ,test_hive_1051 as test_hive_1051 + ,test_hive_1041 as test_hive_1041 + ,test_hive_1042 as test_hive_1042 + ,test_hive_1044 as test_hive_1044 + ,test_hive_1046 as test_hive_1046 + ,test_hive_1050 as test_hive_1050 + ,test_hive_1049 as test_hive_1049 + ,test_hive_1053 as test_hive_1053 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1059 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1057 +POSTHOOK: Input: default@test_hive_1059 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1058 +POSTHOOK: Lineage: test_hive_1058.creation_date EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.ds EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.ds_ts SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.source_file_name SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_1041 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1041, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_1042 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1042, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_1043 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1043, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_1044 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1044, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_1045 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1045, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_1046 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1046, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_1047 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1047, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_1048 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1048, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_1049 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1049, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_1050 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1050, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_1051 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1051, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_1052 
EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1052, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_1053 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1053, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_116 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_116, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_117 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_117, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_118 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_118, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_119 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_119, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_120 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_120, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_121 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_121, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_122 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_122, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_123 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_123, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_124 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_124, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_125 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_125, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_126 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_126, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_127 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_127, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_128 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_128, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_129 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_129, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_130 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_130, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_131 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_131, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_132 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_132, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_133 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_133, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_134 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_134, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_135 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_135, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_136 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_136, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_137 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_137, type:string, 
comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_138 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_138, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_139 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_139, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_140 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_140, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_141 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_141, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_142 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_142, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_143 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_143, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_144 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_144, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_145 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_145, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_146 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_146, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_147 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_147, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_148 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_148, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_149 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_149, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_150 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_150, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_151 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_151, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_152 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_152, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_153 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_153, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_154 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_154, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_155 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_155, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_156 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_156, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_157 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_157, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_158 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_158, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_159 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_159, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.test_hive_160 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_160, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1058.ts EXPRESSION 
[(test_hive_1057)test_hive_1057.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1055 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1055 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1055 +as +select t1.* +from test_hive_1058 t1 +inner join test_hive_1056 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1056 +PREHOOK: Input: default@test_hive_1057 +PREHOOK: Input: default@test_hive_1058 +PREHOOK: Input: default@test_hive_1059 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1055 +POSTHOOK: query: create view test_hive_1055 +as +select t1.* +from test_hive_1058 t1 +inner join test_hive_1056 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1056 +POSTHOOK: Input: default@test_hive_1057 +POSTHOOK: Input: default@test_hive_1058 +POSTHOOK: Input: default@test_hive_1059 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1055 +POSTHOOK: Lineage: test_hive_1055.creation_date EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.ds EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.ds_ts SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.source_file_name SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_1041 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1041, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_1042 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1042, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_1043 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1043, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_1044 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1044, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_1045 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1045, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_1046 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1046, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_1047 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1047, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_1048 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1048, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_1049 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1049, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_1050 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1050, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_1051 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1051, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_1052 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1052, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_1053 
EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_1053, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_116 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_116, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_117 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_117, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_118 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_118, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_119 EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_119, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_120 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_120, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_121 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_121, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_122 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_122, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_123 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_123, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_124 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_124, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_125 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_125, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_126 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_126, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_127 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_127, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_128 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_128, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_129 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_129, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_130 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_130, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_131 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_131, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_132 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_132, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_133 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_133, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_134 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_134, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_135 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_135, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_136 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_136, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_137 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_137, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_138 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_138, type:string, 
comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_139 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_139, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_140 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_140, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_141 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_141, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_142 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_142, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_143 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_143, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_144 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_144, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_145 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_145, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_146 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_146, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_147 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_147, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_148 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_148, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_149 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_149, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_150 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_150, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_151 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_151, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_152 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_152, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_153 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_153, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_154 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_154, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_155 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_155, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_156 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_156, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_157 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_157, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_158 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_158, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_159 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_159, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.test_hive_160 SIMPLE [(test_hive_1057)test_hive_1057.FieldSchema(name:test_hive_160, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1055.ts EXPRESSION [(test_hive_1057)test_hive_1057.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1083 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop 
table if exists test_hive_1083 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1083 +( + test_hive_1072 string + ,test_hive_1065 string + ,test_hive_1073 string + ,test_hive_161 string + ,test_hive_162 string + ,test_hive_163 string + ,test_hive_164 string + ,test_hive_167 string + ,test_hive_168 string + ,test_hive_170 string + ,test_hive_197 string + ,test_hive_198 string + ,test_hive_200 string + ,test_hive_201 string + ,test_hive_202 string + ,test_hive_203 string + ,test_hive_205 string + ,test_hive_206 string + ,test_hive_212 string + ,test_hive_213 string + ,test_hive_178 string + ,test_hive_1060 string + ,test_hive_1061 string + ,test_hive_10612 string + ,test_hive_1063 string + ,test_hive_1064 string + ,test_hive_165 string + ,test_hive_166 string + ,test_hive_169 string + ,test_hive_193 string + ,test_hive_194 string + ,test_hive_195 string + ,test_hive_196 string + ,test_hive_204 string + ,test_hive_207 string + ,test_hive_208 string + ,test_hive_209 string + ,test_hive_210 string + ,test_hive_211 string + ,test_hive_171 string + ,test_hive_172 string + ,test_hive_173 string + ,test_hive_174 string + ,test_hive_175 string + ,test_hive_176 string + ,test_hive_177 string + ,test_hive_179 string + ,test_hive_180 string + ,test_hive_181 string + ,test_hive_182 string + ,test_hive_183 string + ,test_hive_184 string + ,test_hive_185 string + ,test_hive_186 string + ,test_hive_187 string + ,test_hive_188 string + ,test_hive_189 string + ,test_hive_190 string + ,test_hive_191 string + ,test_hive_192 string + ,test_hive_1067 string + ,test_hive_1067_a_g string + ,test_hive_1067_h string + ,test_hive_1066 string + ,test_hive_1070 string + ,test_hive_1070_a_d string + ,test_hive_1074 string + ,test_hive_1074_bp string + ,test_hive_1074_cont string + ,test_hive_1074_lag string + ,test_hive_1078 string + ,test_hive_1078_bp string + ,test_hive_1078_cont string + ,test_hive_1078_lag string + ,test_hive_199 string + ,test_hive_1082 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1083 +POSTHOOK: query: create table test_hive_1083 +( + test_hive_1072 string + ,test_hive_1065 string + ,test_hive_1073 string + ,test_hive_161 string + ,test_hive_162 string + ,test_hive_163 string + ,test_hive_164 string + ,test_hive_167 string + ,test_hive_168 string + ,test_hive_170 string + ,test_hive_197 string + ,test_hive_198 string + ,test_hive_200 string + ,test_hive_201 string + ,test_hive_202 string + ,test_hive_203 string + ,test_hive_205 string + ,test_hive_206 string + ,test_hive_212 string + ,test_hive_213 string + ,test_hive_178 string + ,test_hive_1060 string + ,test_hive_1061 string + ,test_hive_10612 string + ,test_hive_1063 string + ,test_hive_1064 string + ,test_hive_165 string + ,test_hive_166 string + ,test_hive_169 string + ,test_hive_193 string + ,test_hive_194 string + ,test_hive_195 string + ,test_hive_196 string + ,test_hive_204 string + ,test_hive_207 string + ,test_hive_208 string + ,test_hive_209 string + ,test_hive_210 string + ,test_hive_211 string + ,test_hive_171 string + ,test_hive_172 string + ,test_hive_173 string + ,test_hive_174 string + ,test_hive_175 string + ,test_hive_176 string + ,test_hive_177 string + ,test_hive_179 string + ,test_hive_180 string + ,test_hive_181 string + ,test_hive_182 string + ,test_hive_183 string + ,test_hive_184 string + ,test_hive_185 
string + ,test_hive_186 string + ,test_hive_187 string + ,test_hive_188 string + ,test_hive_189 string + ,test_hive_190 string + ,test_hive_191 string + ,test_hive_192 string + ,test_hive_1067 string + ,test_hive_1067_a_g string + ,test_hive_1067_h string + ,test_hive_1066 string + ,test_hive_1070 string + ,test_hive_1070_a_d string + ,test_hive_1074 string + ,test_hive_1074_bp string + ,test_hive_1074_cont string + ,test_hive_1074_lag string + ,test_hive_1078 string + ,test_hive_1078_bp string + ,test_hive_1078_cont string + ,test_hive_1078_lag string + ,test_hive_199 string + ,test_hive_1082 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1083 +PREHOOK: query: create table if not exists test_hive_1086 +( + test_hive_1072 string + ,test_hive_1065 string + ,test_hive_1073 string + ,test_hive_161 string + ,test_hive_162 string + ,test_hive_163 string + ,test_hive_164 string + ,test_hive_167 string + ,test_hive_168 string + ,test_hive_170 string + ,test_hive_197 string + ,test_hive_198 string + ,test_hive_200 string + ,test_hive_201 string + ,test_hive_202 string + ,test_hive_203 string + ,test_hive_205 string + ,test_hive_206 string + ,test_hive_212 string + ,test_hive_213 string + ,test_hive_178 string + ,test_hive_1060 string + ,test_hive_1061 string + ,test_hive_10612 string + ,test_hive_1063 string + ,test_hive_1064 string + ,test_hive_165 string + ,test_hive_166 string + ,test_hive_169 string + ,test_hive_193 string + ,test_hive_194 string + ,test_hive_195 string + ,test_hive_196 string + ,test_hive_204 string + ,test_hive_207 string + ,test_hive_208 string + ,test_hive_209 string + ,test_hive_210 string + ,test_hive_211 string + ,test_hive_171 string + ,test_hive_172 string + ,test_hive_173 string + ,test_hive_174 string + ,test_hive_175 string + ,test_hive_176 string + ,test_hive_177 string + ,test_hive_179 string + ,test_hive_180 string + ,test_hive_181 string + ,test_hive_182 string + ,test_hive_183 string + ,test_hive_184 string + ,test_hive_185 string + ,test_hive_186 string + ,test_hive_187 string + ,test_hive_188 string + ,test_hive_189 string + ,test_hive_190 string + ,test_hive_191 string + ,test_hive_192 string + ,test_hive_1067 string + ,test_hive_1067_a_g string + ,test_hive_1067_h string + ,test_hive_1066 string + ,test_hive_1070 string + ,test_hive_1070_a_d string + ,test_hive_1074 string + ,test_hive_1074_bp string + ,test_hive_1074_cont string + ,test_hive_1074_lag string + ,test_hive_1078 string + ,test_hive_1078_bp string + ,test_hive_1078_cont string + ,test_hive_1078_lag string + ,test_hive_199 string + ,test_hive_1082 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1086 +POSTHOOK: query: create table if not exists test_hive_1086 +( + test_hive_1072 string + ,test_hive_1065 string + ,test_hive_1073 string + ,test_hive_161 string + ,test_hive_162 string + ,test_hive_163 string + ,test_hive_164 string + ,test_hive_167 string + ,test_hive_168 string + ,test_hive_170 string + ,test_hive_197 string + ,test_hive_198 string + ,test_hive_200 string + ,test_hive_201 string + ,test_hive_202 string + ,test_hive_203 string + ,test_hive_205 string + ,test_hive_206 string + ,test_hive_212 string + 
,test_hive_213 string + ,test_hive_178 string + ,test_hive_1060 string + ,test_hive_1061 string + ,test_hive_10612 string + ,test_hive_1063 string + ,test_hive_1064 string + ,test_hive_165 string + ,test_hive_166 string + ,test_hive_169 string + ,test_hive_193 string + ,test_hive_194 string + ,test_hive_195 string + ,test_hive_196 string + ,test_hive_204 string + ,test_hive_207 string + ,test_hive_208 string + ,test_hive_209 string + ,test_hive_210 string + ,test_hive_211 string + ,test_hive_171 string + ,test_hive_172 string + ,test_hive_173 string + ,test_hive_174 string + ,test_hive_175 string + ,test_hive_176 string + ,test_hive_177 string + ,test_hive_179 string + ,test_hive_180 string + ,test_hive_181 string + ,test_hive_182 string + ,test_hive_183 string + ,test_hive_184 string + ,test_hive_185 string + ,test_hive_186 string + ,test_hive_187 string + ,test_hive_188 string + ,test_hive_189 string + ,test_hive_190 string + ,test_hive_191 string + ,test_hive_192 string + ,test_hive_1067 string + ,test_hive_1067_a_g string + ,test_hive_1067_h string + ,test_hive_1066 string + ,test_hive_1070 string + ,test_hive_1070_a_d string + ,test_hive_1074 string + ,test_hive_1074_bp string + ,test_hive_1074_cont string + ,test_hive_1074_lag string + ,test_hive_1078 string + ,test_hive_1078_bp string + ,test_hive_1078_cont string + ,test_hive_1078_lag string + ,test_hive_199 string + ,test_hive_1082 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1086 +PREHOOK: query: drop table if exists test_hive_1085 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1085 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1085 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1085 +POSTHOOK: query: create table if not exists test_hive_1085 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1085 +PREHOOK: query: drop view if exists test_hive_1088 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1088 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1088 +as +select + cast(test_hive_1072 as int) as test_hive_1072 + ,cast(test_hive_1065 as int) as test_hive_1065 + ,cast(test_hive_1073 as int) as test_hive_1073 + ,cast(test_hive_161 as string) as test_hive_161 + ,cast(test_hive_162 as string) as test_hive_162 + ,cast(test_hive_163 as string) as test_hive_163 + ,cast(test_hive_164 as string) as test_hive_164 + ,cast(test_hive_167 as string) as test_hive_167 + ,cast(test_hive_168 as string) as test_hive_168 + ,cast(test_hive_170 as string) as test_hive_170 + ,cast(test_hive_197 as string) as test_hive_197 + ,cast(test_hive_198 as string) as test_hive_198 + ,cast(test_hive_200 as string) as test_hive_200 + ,cast(test_hive_201 as string) as test_hive_201 + ,cast(test_hive_202 as string) as test_hive_202 + ,cast(test_hive_203 as string) as test_hive_203 + ,cast(test_hive_205 as string) as test_hive_205 + ,cast(test_hive_206 as string) as test_hive_206 + ,cast(test_hive_212 as string) as test_hive_212 + ,cast(test_hive_213 as string) as test_hive_213 + ,cast(test_hive_178 as string) as test_hive_178 + ,cast(from_unixtime(unix_timestamp(test_hive_1060,'yyyymmdd'), 
'yyyy-mm-dd') as timestamp) as test_hive_1060 + ,cast(test_hive_1061 as string) as test_hive_1061 + ,cast(test_hive_10612 as string) as test_hive_10612 + ,cast(test_hive_1063 as string) as test_hive_1063 + ,cast(test_hive_1064 as string) as test_hive_1064 + ,cast(test_hive_165 as string) as test_hive_165 + ,cast(test_hive_166 as string) as test_hive_166 + ,cast(test_hive_169 as string) as test_hive_169 + ,cast(test_hive_193 as string) as test_hive_193 + ,cast(test_hive_194 as string) as test_hive_194 + ,cast(test_hive_195 as string) as test_hive_195 + ,cast(test_hive_196 as string) as test_hive_196 + ,cast(test_hive_204 as string) as test_hive_204 + ,cast(test_hive_207 as string) as test_hive_207 + ,cast(test_hive_208 as string) as test_hive_208 + ,cast(test_hive_209 as string) as test_hive_209 + ,cast(test_hive_210 as string) as test_hive_210 + ,cast(test_hive_211 as string) as test_hive_211 + ,cast(test_hive_171 as string) as test_hive_171 + ,cast(test_hive_172 as string) as test_hive_172 + ,cast(test_hive_173 as string) as test_hive_173 + ,cast(test_hive_174 as string) as test_hive_174 + ,cast(test_hive_175 as string) as test_hive_175 + ,cast(test_hive_176 as string) as test_hive_176 + ,cast(test_hive_177 as string) as test_hive_177 + ,cast(test_hive_179 as string) as test_hive_179 + ,cast(test_hive_180 as string) as test_hive_180 + ,cast(test_hive_181 as string) as test_hive_181 + ,cast(test_hive_182 as string) as test_hive_182 + ,cast(test_hive_183 as string) as test_hive_183 + ,cast(test_hive_184 as string) as test_hive_184 + ,cast(test_hive_185 as string) as test_hive_185 + ,cast(test_hive_186 as string) as test_hive_186 + ,cast(test_hive_187 as string) as test_hive_187 + ,cast(test_hive_188 as string) as test_hive_188 + ,cast(test_hive_189 as string) as test_hive_189 + ,cast(test_hive_190 as string) as test_hive_190 + ,cast(test_hive_191 as string) as test_hive_191 + ,cast(test_hive_192 as string) as test_hive_192 + ,cast(test_hive_1067 as int) as test_hive_1067 + ,cast(test_hive_1067_a_g as int) as test_hive_1067_a_g + ,cast(test_hive_1067_h as int) as test_hive_1067_h + ,cast(test_hive_1066 as int) as test_hive_1066 + ,cast(test_hive_1070 as int) as test_hive_1070 + ,cast(test_hive_1070_a_d as int) as test_hive_1070_a_d + ,cast(test_hive_1074 as int) as test_hive_1074 + ,cast(test_hive_1074_bp as int) as test_hive_1074_bp + ,cast(test_hive_1074_cont as int) as test_hive_1074_cont + ,cast(test_hive_1074_lag as int) as test_hive_1074_lag + ,cast(test_hive_1078 as int) as test_hive_1078 + ,cast(test_hive_1078_bp as int) as test_hive_1078_bp + ,cast(test_hive_1078_cont as int) as test_hive_1078_cont + ,cast(test_hive_1078_lag as int) as test_hive_1078_lag + ,cast(test_hive_199 as string) as test_hive_199 + ,cast(from_unixtime(unix_timestamp(test_hive_1082,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1082 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1086 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1086 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1088 +POSTHOOK: query: create view if not exists test_hive_1088 +as +select + cast(test_hive_1072 as int) as test_hive_1072 + ,cast(test_hive_1065 as int) as test_hive_1065 + ,cast(test_hive_1073 as int) as test_hive_1073 + ,cast(test_hive_161 as string) as test_hive_161 + ,cast(test_hive_162 as string) as test_hive_162 + ,cast(test_hive_163 as string) as test_hive_163 
+ ,cast(test_hive_164 as string) as test_hive_164 + ,cast(test_hive_167 as string) as test_hive_167 + ,cast(test_hive_168 as string) as test_hive_168 + ,cast(test_hive_170 as string) as test_hive_170 + ,cast(test_hive_197 as string) as test_hive_197 + ,cast(test_hive_198 as string) as test_hive_198 + ,cast(test_hive_200 as string) as test_hive_200 + ,cast(test_hive_201 as string) as test_hive_201 + ,cast(test_hive_202 as string) as test_hive_202 + ,cast(test_hive_203 as string) as test_hive_203 + ,cast(test_hive_205 as string) as test_hive_205 + ,cast(test_hive_206 as string) as test_hive_206 + ,cast(test_hive_212 as string) as test_hive_212 + ,cast(test_hive_213 as string) as test_hive_213 + ,cast(test_hive_178 as string) as test_hive_178 + ,cast(from_unixtime(unix_timestamp(test_hive_1060,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1060 + ,cast(test_hive_1061 as string) as test_hive_1061 + ,cast(test_hive_10612 as string) as test_hive_10612 + ,cast(test_hive_1063 as string) as test_hive_1063 + ,cast(test_hive_1064 as string) as test_hive_1064 + ,cast(test_hive_165 as string) as test_hive_165 + ,cast(test_hive_166 as string) as test_hive_166 + ,cast(test_hive_169 as string) as test_hive_169 + ,cast(test_hive_193 as string) as test_hive_193 + ,cast(test_hive_194 as string) as test_hive_194 + ,cast(test_hive_195 as string) as test_hive_195 + ,cast(test_hive_196 as string) as test_hive_196 + ,cast(test_hive_204 as string) as test_hive_204 + ,cast(test_hive_207 as string) as test_hive_207 + ,cast(test_hive_208 as string) as test_hive_208 + ,cast(test_hive_209 as string) as test_hive_209 + ,cast(test_hive_210 as string) as test_hive_210 + ,cast(test_hive_211 as string) as test_hive_211 + ,cast(test_hive_171 as string) as test_hive_171 + ,cast(test_hive_172 as string) as test_hive_172 + ,cast(test_hive_173 as string) as test_hive_173 + ,cast(test_hive_174 as string) as test_hive_174 + ,cast(test_hive_175 as string) as test_hive_175 + ,cast(test_hive_176 as string) as test_hive_176 + ,cast(test_hive_177 as string) as test_hive_177 + ,cast(test_hive_179 as string) as test_hive_179 + ,cast(test_hive_180 as string) as test_hive_180 + ,cast(test_hive_181 as string) as test_hive_181 + ,cast(test_hive_182 as string) as test_hive_182 + ,cast(test_hive_183 as string) as test_hive_183 + ,cast(test_hive_184 as string) as test_hive_184 + ,cast(test_hive_185 as string) as test_hive_185 + ,cast(test_hive_186 as string) as test_hive_186 + ,cast(test_hive_187 as string) as test_hive_187 + ,cast(test_hive_188 as string) as test_hive_188 + ,cast(test_hive_189 as string) as test_hive_189 + ,cast(test_hive_190 as string) as test_hive_190 + ,cast(test_hive_191 as string) as test_hive_191 + ,cast(test_hive_192 as string) as test_hive_192 + ,cast(test_hive_1067 as int) as test_hive_1067 + ,cast(test_hive_1067_a_g as int) as test_hive_1067_a_g + ,cast(test_hive_1067_h as int) as test_hive_1067_h + ,cast(test_hive_1066 as int) as test_hive_1066 + ,cast(test_hive_1070 as int) as test_hive_1070 + ,cast(test_hive_1070_a_d as int) as test_hive_1070_a_d + ,cast(test_hive_1074 as int) as test_hive_1074 + ,cast(test_hive_1074_bp as int) as test_hive_1074_bp + ,cast(test_hive_1074_cont as int) as test_hive_1074_cont + ,cast(test_hive_1074_lag as int) as test_hive_1074_lag + ,cast(test_hive_1078 as int) as test_hive_1078 + ,cast(test_hive_1078_bp as int) as test_hive_1078_bp + ,cast(test_hive_1078_cont as int) as test_hive_1078_cont + ,cast(test_hive_1078_lag as int) as test_hive_1078_lag + ,cast(test_hive_199 as 
string) as test_hive_199 + ,cast(from_unixtime(unix_timestamp(test_hive_1082,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1082 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1086 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1086 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1088 +POSTHOOK: Lineage: test_hive_1088.creation_date EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.ds EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.ds_ts SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.source_file_name SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_1060 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1060, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_1061 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1061, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_10612 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_10612, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_1063 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1063, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_1064 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1064, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_1065 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1065, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_1066 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1066, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_1067 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1067, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_1067_a_g EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1067_a_g, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_1067_h EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1067_h, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_1070 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1070, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_1070_a_d EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1070_a_d, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_1072 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1072, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_1073 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1073, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_1074 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1074, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_1074_bp EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1074_bp, type:string, comment:null), ] +POSTHOOK: 
Lineage: test_hive_1088.test_hive_1074_cont EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1074_cont, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_1074_lag EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1074_lag, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_1078 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1078, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_1078_bp EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1078_bp, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_1078_cont EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1078_cont, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_1078_lag EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1078_lag, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_1082 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1082, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_161 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_161, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_162 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_162, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_163 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_163, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_164 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_164, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_165 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_165, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_166 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_166, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_167 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_167, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_168 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_168, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_169 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_169, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_170 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_170, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_171 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_171, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_172 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_172, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_173 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_173, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_174 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_174, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_175 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_175, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_176 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_176, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1088.test_hive_177 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_177, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_178 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_178, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_179 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_179, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_180 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_180, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_181 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_181, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_182 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_182, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_183 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_183, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_184 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_184, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_185 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_185, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_186 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_186, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_187 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_187, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_188 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_188, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_189 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_189, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_190 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_190, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_191 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_191, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_192 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_192, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_193 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_193, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_194 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_194, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_195 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_195, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_196 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_196, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_197 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_197, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_198 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_198, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_199 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_199, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_200 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_200, 
type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_201 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_201, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_202 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_202, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_203 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_203, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_204 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_204, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_205 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_205, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_206 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_206, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_207 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_207, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_208 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_208, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_209 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_209, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_210 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_210, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_211 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_211, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_212 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_212, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.test_hive_213 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_213, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1088.ts EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1087 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1087 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1087 +as +select + test_hive_1072 as test_hive_1072 + ,test_hive_1065 as test_hive_1065 + ,test_hive_1073 as test_hive_1073 + ,test_hive_161 as test_hive_161 + ,test_hive_162 as test_hive_162 + ,test_hive_163 as test_hive_163 + ,test_hive_164 as test_hive_164 + ,test_hive_167 as test_hive_167 + ,test_hive_168 as test_hive_168 + ,test_hive_170 as test_hive_170 + ,test_hive_197 as test_hive_197 + ,test_hive_198 as test_hive_198 + ,test_hive_200 as test_hive_200 + ,test_hive_201 as test_hive_201 + ,test_hive_202 as test_hive_202 + ,test_hive_203 as test_hive_203 + ,test_hive_205 as test_hive_205 + ,test_hive_206 as test_hive_206 + ,test_hive_212 as test_hive_212 + ,test_hive_213 as test_hive_213 + ,test_hive_178 as test_hive_178 + ,test_hive_1060 as test_hive_1060 + ,test_hive_1061 as test_hive_1061 + ,test_hive_10612 as test_hive_10612 + ,test_hive_1063 as test_hive_1063 + ,test_hive_1064 as test_hive_1064 + ,test_hive_165 as test_hive_165 + ,test_hive_166 as test_hive_166 + ,test_hive_169 as test_hive_169 + ,test_hive_193 as test_hive_193 + ,test_hive_194 as test_hive_194 + ,test_hive_195 as test_hive_195 + ,test_hive_196 as test_hive_196 + ,test_hive_204 as test_hive_204 + ,test_hive_207 as test_hive_207 + ,test_hive_208 as 
test_hive_208 + ,test_hive_209 as test_hive_209 + ,test_hive_210 as test_hive_210 + ,test_hive_211 as test_hive_211 + ,test_hive_171 as test_hive_171 + ,test_hive_172 as test_hive_172 + ,test_hive_173 as test_hive_173 + ,test_hive_174 as test_hive_174 + ,test_hive_175 as test_hive_175 + ,test_hive_176 as test_hive_176 + ,test_hive_177 as test_hive_177 + ,test_hive_179 as test_hive_179 + ,test_hive_180 as test_hive_180 + ,test_hive_181 as test_hive_181 + ,test_hive_182 as test_hive_182 + ,test_hive_183 as test_hive_183 + ,test_hive_184 as test_hive_184 + ,test_hive_185 as test_hive_185 + ,test_hive_186 as test_hive_186 + ,test_hive_187 as test_hive_187 + ,test_hive_188 as test_hive_188 + ,test_hive_189 as test_hive_189 + ,test_hive_190 as test_hive_190 + ,test_hive_191 as test_hive_191 + ,test_hive_192 as test_hive_192 + ,test_hive_1067 as test_hive_1067 + ,test_hive_1067_a_g as test_hive_1067_a_g + ,test_hive_1067_h as test_hive_1067_h + ,test_hive_1066 as test_hive_1066 + ,test_hive_1070 as test_hive_1070 + ,test_hive_1070_a_d as test_hive_1070_a_d + ,test_hive_1074 as test_hive_1074 + ,test_hive_1074_bp as test_hive_1074_bp + ,test_hive_1074_cont as test_hive_1074_cont + ,test_hive_1074_lag as test_hive_1074_lag + ,test_hive_1078 as test_hive_1078 + ,test_hive_1078_bp as test_hive_1078_bp + ,test_hive_1078_cont as test_hive_1078_cont + ,test_hive_1078_lag as test_hive_1078_lag + ,test_hive_199 as test_hive_199 + ,test_hive_1082 as test_hive_1082 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1088 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1086 +PREHOOK: Input: default@test_hive_1088 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1087 +POSTHOOK: query: create view test_hive_1087 +as +select + test_hive_1072 as test_hive_1072 + ,test_hive_1065 as test_hive_1065 + ,test_hive_1073 as test_hive_1073 + ,test_hive_161 as test_hive_161 + ,test_hive_162 as test_hive_162 + ,test_hive_163 as test_hive_163 + ,test_hive_164 as test_hive_164 + ,test_hive_167 as test_hive_167 + ,test_hive_168 as test_hive_168 + ,test_hive_170 as test_hive_170 + ,test_hive_197 as test_hive_197 + ,test_hive_198 as test_hive_198 + ,test_hive_200 as test_hive_200 + ,test_hive_201 as test_hive_201 + ,test_hive_202 as test_hive_202 + ,test_hive_203 as test_hive_203 + ,test_hive_205 as test_hive_205 + ,test_hive_206 as test_hive_206 + ,test_hive_212 as test_hive_212 + ,test_hive_213 as test_hive_213 + ,test_hive_178 as test_hive_178 + ,test_hive_1060 as test_hive_1060 + ,test_hive_1061 as test_hive_1061 + ,test_hive_10612 as test_hive_10612 + ,test_hive_1063 as test_hive_1063 + ,test_hive_1064 as test_hive_1064 + ,test_hive_165 as test_hive_165 + ,test_hive_166 as test_hive_166 + ,test_hive_169 as test_hive_169 + ,test_hive_193 as test_hive_193 + ,test_hive_194 as test_hive_194 + ,test_hive_195 as test_hive_195 + ,test_hive_196 as test_hive_196 + ,test_hive_204 as test_hive_204 + ,test_hive_207 as test_hive_207 + ,test_hive_208 as test_hive_208 + ,test_hive_209 as test_hive_209 + ,test_hive_210 as test_hive_210 + ,test_hive_211 as test_hive_211 + ,test_hive_171 as test_hive_171 + ,test_hive_172 as test_hive_172 + ,test_hive_173 as test_hive_173 + ,test_hive_174 as test_hive_174 + ,test_hive_175 as test_hive_175 + ,test_hive_176 as test_hive_176 + ,test_hive_177 as test_hive_177 + ,test_hive_179 as test_hive_179 + ,test_hive_180 as test_hive_180 + ,test_hive_181 as test_hive_181 + ,test_hive_182 as test_hive_182 + ,test_hive_183 as test_hive_183 + 
,test_hive_184 as test_hive_184 + ,test_hive_185 as test_hive_185 + ,test_hive_186 as test_hive_186 + ,test_hive_187 as test_hive_187 + ,test_hive_188 as test_hive_188 + ,test_hive_189 as test_hive_189 + ,test_hive_190 as test_hive_190 + ,test_hive_191 as test_hive_191 + ,test_hive_192 as test_hive_192 + ,test_hive_1067 as test_hive_1067 + ,test_hive_1067_a_g as test_hive_1067_a_g + ,test_hive_1067_h as test_hive_1067_h + ,test_hive_1066 as test_hive_1066 + ,test_hive_1070 as test_hive_1070 + ,test_hive_1070_a_d as test_hive_1070_a_d + ,test_hive_1074 as test_hive_1074 + ,test_hive_1074_bp as test_hive_1074_bp + ,test_hive_1074_cont as test_hive_1074_cont + ,test_hive_1074_lag as test_hive_1074_lag + ,test_hive_1078 as test_hive_1078 + ,test_hive_1078_bp as test_hive_1078_bp + ,test_hive_1078_cont as test_hive_1078_cont + ,test_hive_1078_lag as test_hive_1078_lag + ,test_hive_199 as test_hive_199 + ,test_hive_1082 as test_hive_1082 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1088 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1086 +POSTHOOK: Input: default@test_hive_1088 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1087 +POSTHOOK: Lineage: test_hive_1087.creation_date EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.ds EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.ds_ts SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.source_file_name SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_1060 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1060, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_1061 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1061, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_10612 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_10612, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_1063 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1063, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_1064 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1064, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_1065 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1065, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_1066 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1066, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_1067 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1067, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_1067_a_g EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1067_a_g, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_1067_h EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1067_h, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_1070 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1070, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1087.test_hive_1070_a_d EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1070_a_d, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_1072 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1072, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_1073 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1073, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_1074 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1074, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_1074_bp EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1074_bp, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_1074_cont EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1074_cont, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_1074_lag EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1074_lag, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_1078 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1078, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_1078_bp EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1078_bp, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_1078_cont EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1078_cont, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_1078_lag EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1078_lag, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_1082 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1082, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_161 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_161, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_162 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_162, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_163 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_163, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_164 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_164, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_165 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_165, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_166 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_166, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_167 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_167, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_168 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_168, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_169 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_169, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_170 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_170, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_171 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_171, type:string, 
comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_172 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_172, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_173 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_173, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_174 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_174, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_175 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_175, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_176 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_176, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_177 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_177, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_178 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_178, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_179 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_179, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_180 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_180, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_181 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_181, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_182 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_182, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_183 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_183, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_184 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_184, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_185 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_185, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_186 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_186, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_187 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_187, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_188 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_188, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_189 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_189, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_190 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_190, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_191 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_191, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_192 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_192, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_193 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_193, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_194 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_194, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_195 SIMPLE 
[(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_195, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_196 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_196, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_197 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_197, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_198 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_198, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_199 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_199, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_200 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_200, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_201 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_201, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_202 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_202, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_203 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_203, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_204 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_204, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_205 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_205, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_206 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_206, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_207 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_207, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_208 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_208, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_209 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_209, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_210 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_210, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_211 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_211, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_212 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_212, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.test_hive_213 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_213, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1087.ts EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1084 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1084 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1084 +as +select t1.* +from test_hive_1087 t1 +inner join test_hive_1085 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1085 +PREHOOK: Input: default@test_hive_1086 +PREHOOK: Input: default@test_hive_1087 +PREHOOK: Input: default@test_hive_1088 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1084 +POSTHOOK: query: create view test_hive_1084 +as +select t1.* 
+from test_hive_1087 t1 +inner join test_hive_1085 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1085 +POSTHOOK: Input: default@test_hive_1086 +POSTHOOK: Input: default@test_hive_1087 +POSTHOOK: Input: default@test_hive_1088 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1084 +POSTHOOK: Lineage: test_hive_1084.creation_date EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.ds EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.ds_ts SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.source_file_name SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1060 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1060, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1061 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1061, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_10612 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_10612, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1063 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1063, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1064 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1064, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1065 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1065, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1066 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1066, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1067 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1067, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1067_a_g EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1067_a_g, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1067_h EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1067_h, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1070 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1070, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1070_a_d EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1070_a_d, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1072 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1072, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1073 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1073, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1074 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1074, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1074_bp EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1074_bp, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1074_cont EXPRESSION 
[(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1074_cont, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1074_lag EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1074_lag, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1078 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1078, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1078_bp EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1078_bp, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1078_cont EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1078_cont, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1078_lag EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1078_lag, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_1082 EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_1082, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_161 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_161, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_162 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_162, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_163 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_163, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_164 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_164, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_165 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_165, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_166 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_166, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_167 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_167, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_168 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_168, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_169 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_169, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_170 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_170, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_171 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_171, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_172 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_172, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_173 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_173, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_174 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_174, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_175 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_175, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_176 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_176, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_177 SIMPLE 
[(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_177, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_178 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_178, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_179 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_179, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_180 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_180, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_181 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_181, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_182 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_182, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_183 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_183, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_184 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_184, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_185 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_185, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_186 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_186, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_187 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_187, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_188 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_188, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_189 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_189, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_190 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_190, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_191 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_191, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_192 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_192, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_193 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_193, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_194 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_194, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_195 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_195, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_196 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_196, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_197 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_197, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_198 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_198, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_199 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_199, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_200 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_200, type:string, comment:null), ] +POSTHOOK: 
Lineage: test_hive_1084.test_hive_201 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_201, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_202 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_202, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_203 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_203, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_204 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_204, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_205 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_205, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_206 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_206, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_207 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_207, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_208 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_208, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_209 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_209, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_210 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_210, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_211 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_211, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_212 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_212, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.test_hive_213 SIMPLE [(test_hive_1086)test_hive_1086.FieldSchema(name:test_hive_213, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1084.ts EXPRESSION [(test_hive_1086)test_hive_1086.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1100 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1100 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1100 +( + test_hive_1097 string + ,test_hive_1095 string + ,test_hive_1098 string + ,test_hive_1089 string + ,test_hive_1090 string + ,test_hive_10902 string + ,test_hive_1092 string + ,test_hive_1093 string + ,test_hive_244 string + ,test_hive_225 string + ,test_hive_214 string + ,test_hive_215 string + ,test_hive_216 string + ,test_hive_217 string + ,test_hive_240 string + ,test_hive_241 string + ,test_hive_242 string + ,test_hive_243 string + ,test_hive_245 string + ,test_hive_246 string + ,test_hive_247 string + ,test_hive_248 string + ,test_hive_249 string + ,test_hive_250 string + ,test_hive_218 string + ,test_hive_219 string + ,test_hive_220 string + ,test_hive_221 string + ,test_hive_222 string + ,test_hive_223 string + ,test_hive_224 string + ,test_hive_226 string + ,test_hive_227 string + ,test_hive_228 string + ,test_hive_229 string + ,test_hive_230 string + ,test_hive_231 string + ,test_hive_232 string + ,test_hive_233 string + ,test_hive_234 string + ,test_hive_235 string + ,test_hive_236 string + ,test_hive_237 string + ,test_hive_238 string + ,test_hive_239 string + ,test_hive_1094 string + ,test_hive_1096 string + ,test_hive_1099 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' 
+tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1100 +POSTHOOK: query: create table test_hive_1100 +( + test_hive_1097 string + ,test_hive_1095 string + ,test_hive_1098 string + ,test_hive_1089 string + ,test_hive_1090 string + ,test_hive_10902 string + ,test_hive_1092 string + ,test_hive_1093 string + ,test_hive_244 string + ,test_hive_225 string + ,test_hive_214 string + ,test_hive_215 string + ,test_hive_216 string + ,test_hive_217 string + ,test_hive_240 string + ,test_hive_241 string + ,test_hive_242 string + ,test_hive_243 string + ,test_hive_245 string + ,test_hive_246 string + ,test_hive_247 string + ,test_hive_248 string + ,test_hive_249 string + ,test_hive_250 string + ,test_hive_218 string + ,test_hive_219 string + ,test_hive_220 string + ,test_hive_221 string + ,test_hive_222 string + ,test_hive_223 string + ,test_hive_224 string + ,test_hive_226 string + ,test_hive_227 string + ,test_hive_228 string + ,test_hive_229 string + ,test_hive_230 string + ,test_hive_231 string + ,test_hive_232 string + ,test_hive_233 string + ,test_hive_234 string + ,test_hive_235 string + ,test_hive_236 string + ,test_hive_237 string + ,test_hive_238 string + ,test_hive_239 string + ,test_hive_1094 string + ,test_hive_1096 string + ,test_hive_1099 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1100 +PREHOOK: query: create table if not exists test_hive_1103 +( + test_hive_1097 string + ,test_hive_1095 string + ,test_hive_1098 string + ,test_hive_1089 string + ,test_hive_1090 string + ,test_hive_10902 string + ,test_hive_1092 string + ,test_hive_1093 string + ,test_hive_244 string + ,test_hive_225 string + ,test_hive_214 string + ,test_hive_215 string + ,test_hive_216 string + ,test_hive_217 string + ,test_hive_240 string + ,test_hive_241 string + ,test_hive_242 string + ,test_hive_243 string + ,test_hive_245 string + ,test_hive_246 string + ,test_hive_247 string + ,test_hive_248 string + ,test_hive_249 string + ,test_hive_250 string + ,test_hive_218 string + ,test_hive_219 string + ,test_hive_220 string + ,test_hive_221 string + ,test_hive_222 string + ,test_hive_223 string + ,test_hive_224 string + ,test_hive_226 string + ,test_hive_227 string + ,test_hive_228 string + ,test_hive_229 string + ,test_hive_230 string + ,test_hive_231 string + ,test_hive_232 string + ,test_hive_233 string + ,test_hive_234 string + ,test_hive_235 string + ,test_hive_236 string + ,test_hive_237 string + ,test_hive_238 string + ,test_hive_239 string + ,test_hive_1094 string + ,test_hive_1096 string + ,test_hive_1099 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1103 +POSTHOOK: query: create table if not exists test_hive_1103 +( + test_hive_1097 string + ,test_hive_1095 string + ,test_hive_1098 string + ,test_hive_1089 string + ,test_hive_1090 string + ,test_hive_10902 string + ,test_hive_1092 string + ,test_hive_1093 string + ,test_hive_244 string + ,test_hive_225 string + ,test_hive_214 string + ,test_hive_215 string + ,test_hive_216 string + ,test_hive_217 string + ,test_hive_240 string + ,test_hive_241 string + ,test_hive_242 string + ,test_hive_243 
string + ,test_hive_245 string + ,test_hive_246 string + ,test_hive_247 string + ,test_hive_248 string + ,test_hive_249 string + ,test_hive_250 string + ,test_hive_218 string + ,test_hive_219 string + ,test_hive_220 string + ,test_hive_221 string + ,test_hive_222 string + ,test_hive_223 string + ,test_hive_224 string + ,test_hive_226 string + ,test_hive_227 string + ,test_hive_228 string + ,test_hive_229 string + ,test_hive_230 string + ,test_hive_231 string + ,test_hive_232 string + ,test_hive_233 string + ,test_hive_234 string + ,test_hive_235 string + ,test_hive_236 string + ,test_hive_237 string + ,test_hive_238 string + ,test_hive_239 string + ,test_hive_1094 string + ,test_hive_1096 string + ,test_hive_1099 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1103 +PREHOOK: query: drop table if exists test_hive_1102 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1102 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1102 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1102 +POSTHOOK: query: create table if not exists test_hive_1102 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1102 +PREHOOK: query: drop view if exists test_hive_1105 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1105 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1105 +as +select + cast(test_hive_1097 as int) as test_hive_1097 + ,cast(test_hive_1095 as int) as test_hive_1095 + ,cast(test_hive_1098 as int) as test_hive_1098 + ,cast(from_unixtime(unix_timestamp(test_hive_1089,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1089 + ,cast(test_hive_1090 as string) as test_hive_1090 + ,cast(test_hive_10902 as string) as test_hive_10902 + ,cast(test_hive_1092 as string) as test_hive_1092 + ,cast(test_hive_1093 as string) as test_hive_1093 + ,cast(test_hive_244 as string) as test_hive_244 + ,cast(test_hive_225 as string) as test_hive_225 + ,cast(test_hive_214 as string) as test_hive_214 + ,cast(from_unixtime(unix_timestamp(test_hive_215,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_215 + ,cast(test_hive_216 as string) as test_hive_216 + ,cast(test_hive_217 as string) as test_hive_217 + ,cast(test_hive_240 as string) as test_hive_240 + ,cast(test_hive_241 as string) as test_hive_241 + ,cast(test_hive_242 as string) as test_hive_242 + ,cast(test_hive_243 as string) as test_hive_243 + ,cast(test_hive_245 as string) as test_hive_245 + ,cast(test_hive_246 as string) as test_hive_246 + ,cast(test_hive_247 as string) as test_hive_247 + ,cast(test_hive_248 as string) as test_hive_248 + ,cast(test_hive_249 as string) as test_hive_249 + ,cast(test_hive_250 as string) as test_hive_250 + ,cast(test_hive_218 as string) as test_hive_218 + ,cast(test_hive_219 as string) as test_hive_219 + ,cast(test_hive_220 as string) as test_hive_220 + ,cast(test_hive_221 as string) as test_hive_221 + ,cast(test_hive_222 as string) as test_hive_222 + ,cast(test_hive_223 as string) as test_hive_223 + ,cast(test_hive_224 as string) as test_hive_224 + ,cast(test_hive_226 as string) as test_hive_226 + ,cast(test_hive_227 as string) as test_hive_227 + ,cast(test_hive_228 as string) 
as test_hive_228 + ,cast(test_hive_229 as string) as test_hive_229 + ,cast(test_hive_230 as string) as test_hive_230 + ,cast(test_hive_231 as string) as test_hive_231 + ,cast(test_hive_232 as string) as test_hive_232 + ,cast(test_hive_233 as string) as test_hive_233 + ,cast(test_hive_234 as string) as test_hive_234 + ,cast(test_hive_235 as string) as test_hive_235 + ,cast(test_hive_236 as string) as test_hive_236 + ,cast(test_hive_237 as string) as test_hive_237 + ,cast(test_hive_238 as string) as test_hive_238 + ,cast(test_hive_239 as string) as test_hive_239 + ,cast(test_hive_1094 as int) as test_hive_1094 + ,cast(test_hive_1096 as int) as test_hive_1096 + ,cast(from_unixtime(unix_timestamp(test_hive_1099,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1099 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1103 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1103 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1105 +POSTHOOK: query: create view if not exists test_hive_1105 +as +select + cast(test_hive_1097 as int) as test_hive_1097 + ,cast(test_hive_1095 as int) as test_hive_1095 + ,cast(test_hive_1098 as int) as test_hive_1098 + ,cast(from_unixtime(unix_timestamp(test_hive_1089,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1089 + ,cast(test_hive_1090 as string) as test_hive_1090 + ,cast(test_hive_10902 as string) as test_hive_10902 + ,cast(test_hive_1092 as string) as test_hive_1092 + ,cast(test_hive_1093 as string) as test_hive_1093 + ,cast(test_hive_244 as string) as test_hive_244 + ,cast(test_hive_225 as string) as test_hive_225 + ,cast(test_hive_214 as string) as test_hive_214 + ,cast(from_unixtime(unix_timestamp(test_hive_215,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_215 + ,cast(test_hive_216 as string) as test_hive_216 + ,cast(test_hive_217 as string) as test_hive_217 + ,cast(test_hive_240 as string) as test_hive_240 + ,cast(test_hive_241 as string) as test_hive_241 + ,cast(test_hive_242 as string) as test_hive_242 + ,cast(test_hive_243 as string) as test_hive_243 + ,cast(test_hive_245 as string) as test_hive_245 + ,cast(test_hive_246 as string) as test_hive_246 + ,cast(test_hive_247 as string) as test_hive_247 + ,cast(test_hive_248 as string) as test_hive_248 + ,cast(test_hive_249 as string) as test_hive_249 + ,cast(test_hive_250 as string) as test_hive_250 + ,cast(test_hive_218 as string) as test_hive_218 + ,cast(test_hive_219 as string) as test_hive_219 + ,cast(test_hive_220 as string) as test_hive_220 + ,cast(test_hive_221 as string) as test_hive_221 + ,cast(test_hive_222 as string) as test_hive_222 + ,cast(test_hive_223 as string) as test_hive_223 + ,cast(test_hive_224 as string) as test_hive_224 + ,cast(test_hive_226 as string) as test_hive_226 + ,cast(test_hive_227 as string) as test_hive_227 + ,cast(test_hive_228 as string) as test_hive_228 + ,cast(test_hive_229 as string) as test_hive_229 + ,cast(test_hive_230 as string) as test_hive_230 + ,cast(test_hive_231 as string) as test_hive_231 + ,cast(test_hive_232 as string) as test_hive_232 + ,cast(test_hive_233 as string) as test_hive_233 + ,cast(test_hive_234 as string) as test_hive_234 + ,cast(test_hive_235 as string) as test_hive_235 + ,cast(test_hive_236 as string) as test_hive_236 + ,cast(test_hive_237 as string) as test_hive_237 + ,cast(test_hive_238 as string) as test_hive_238 + ,cast(test_hive_239 as string) as test_hive_239 + 
,cast(test_hive_1094 as int) as test_hive_1094
+ ,cast(test_hive_1096 as int) as test_hive_1096
+ ,cast(from_unixtime(unix_timestamp(test_hive_1099,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1099
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1103
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1103
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1105
+POSTHOOK: Lineage: test_hive_1105.creation_date EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.ds EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.ds_ts SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.source_file_name SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_1089 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1089, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_1090 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1090, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_10902 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_10902, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_1092 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1092, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_1093 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1093, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_1094 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1094, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_1095 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1095, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_1096 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1096, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_1097 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1097, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_1098 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1098, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_1099 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1099, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_214 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_214, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_215 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_215, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_216 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_216, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_217 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_217, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_218 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_218, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_219 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_219, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_220 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_220, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_221 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_221, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_222 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_222, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_223 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_223, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_224 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_224, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_225 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_225, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_226 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_226, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_227 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_227, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_228 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_228, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_229 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_229, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_230 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_230, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_231 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_231, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_232 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_232, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_233 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_233, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_234 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_234, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_235 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_235, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_236 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_236, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_237 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_237, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_238 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_238, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_239 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_239, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_240 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_240, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_241 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_241, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_242 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_242, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_243 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_243, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_244 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_244, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_245 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_245, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_246 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_246, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_247 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_247, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_248 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_248, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_249 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_249, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.test_hive_250 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_250, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1105.ts EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:ts, type:int, comment:null), ]
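[Editor's note] The typed views above normalize string timestamps with a unix_timestamp()/from_unixtime() round trip. One caveat worth flagging: these functions take Java SimpleDateFormat-style patterns, which are case-sensitive ('MM' is month, 'HH' is hour-of-day, while lowercase 'mm'/'hh' mean minute and 12-hour clock). The lowercase 'yyyymmddhhmmss' pattern is preserved verbatim from the test query; a minimal sketch of the conventional form, with a hypothetical input value, would be:

-- Hypothetical round trip for a raw value like '20240131235959'.
-- 'yyyyMMddHHmmss' is the usual pattern for that layout; the test's
-- lowercase variant would parse 'mm' as minutes and 'hh' as 12-hour clock.
select cast(from_unixtime(unix_timestamp('20240131235959', 'yyyyMMddHHmmss'),
                          'yyyy-MM-dd HH:mm:ss') as timestamp);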
+PREHOOK: query: drop view if exists test_hive_1104
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1104
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1104
+as
+select
+ test_hive_1097 as test_hive_1097
+ ,test_hive_1095 as test_hive_1095
+ ,test_hive_1098 as test_hive_1098
+ ,test_hive_1089 as test_hive_1089
+ ,test_hive_1090 as test_hive_1090
+ ,test_hive_10902 as test_hive_10902
+ ,test_hive_1092 as test_hive_1092
+ ,test_hive_1093 as test_hive_1093
+ ,test_hive_244 as test_hive_244
+ ,test_hive_225 as test_hive_225
+ ,test_hive_214 as test_hive_214
+ ,test_hive_215 as test_hive_215
+ ,test_hive_216 as test_hive_216
+ ,test_hive_217 as test_hive_217
+ ,test_hive_240 as test_hive_240
+ ,test_hive_241 as test_hive_241
+ ,test_hive_242 as test_hive_242
+ ,test_hive_243 as test_hive_243
+ ,test_hive_245 as test_hive_245
+ ,test_hive_246 as test_hive_246
+ ,test_hive_247 as test_hive_247
+ ,test_hive_248 as test_hive_248
+ ,test_hive_249 as test_hive_249
+ ,test_hive_250 as test_hive_250
+ ,test_hive_218 as test_hive_218
+ ,test_hive_219 as test_hive_219
+ ,test_hive_220 as test_hive_220
+ ,test_hive_221 as test_hive_221
+ ,test_hive_222 as test_hive_222
+ ,test_hive_223 as test_hive_223
+ ,test_hive_224 as test_hive_224
+ ,test_hive_226 as test_hive_226
+ ,test_hive_227 as test_hive_227
+ ,test_hive_228 as test_hive_228
+ ,test_hive_229 as test_hive_229
+ ,test_hive_230 as test_hive_230
+ ,test_hive_231 as test_hive_231
+ ,test_hive_232 as test_hive_232
+ ,test_hive_233 as test_hive_233
+ ,test_hive_234 as test_hive_234
+ ,test_hive_235 as test_hive_235
+ ,test_hive_236 as test_hive_236
+ ,test_hive_237 as test_hive_237
+ ,test_hive_238 as test_hive_238
+ ,test_hive_239 as test_hive_239
+ ,test_hive_1094 as test_hive_1094
+ ,test_hive_1096 as test_hive_1096
+ ,test_hive_1099 as test_hive_1099
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1105 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1103
+PREHOOK: Input: default@test_hive_1105
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1104
+POSTHOOK: query: create view test_hive_1104
+as
+select
+ test_hive_1097 as test_hive_1097
+ ,test_hive_1095 as test_hive_1095
+ ,test_hive_1098 as test_hive_1098
+ ,test_hive_1089 as test_hive_1089
+ ,test_hive_1090 as test_hive_1090
+ ,test_hive_10902 as test_hive_10902
+ ,test_hive_1092 as test_hive_1092
+ ,test_hive_1093 as test_hive_1093
+ ,test_hive_244 as test_hive_244
+ ,test_hive_225 as test_hive_225
+ ,test_hive_214 as test_hive_214
+ ,test_hive_215 as test_hive_215
+ ,test_hive_216 as test_hive_216
+ ,test_hive_217 as test_hive_217
+ ,test_hive_240 as test_hive_240
+ ,test_hive_241 as test_hive_241
+ ,test_hive_242 as test_hive_242
+ ,test_hive_243 as test_hive_243
+ ,test_hive_245 as test_hive_245
+ ,test_hive_246 as test_hive_246
+ ,test_hive_247 as test_hive_247
+ ,test_hive_248 as test_hive_248
+ ,test_hive_249 as test_hive_249
+ ,test_hive_250 as test_hive_250
+ ,test_hive_218 as test_hive_218
+ ,test_hive_219 as test_hive_219
+ ,test_hive_220 as test_hive_220
+ ,test_hive_221 as test_hive_221
+ ,test_hive_222 as test_hive_222
+ ,test_hive_223 as test_hive_223
+ ,test_hive_224 as test_hive_224
+ ,test_hive_226 as test_hive_226
+ ,test_hive_227 as test_hive_227
+ ,test_hive_228 as test_hive_228
+ ,test_hive_229 as test_hive_229
+ ,test_hive_230 as test_hive_230
+ ,test_hive_231 as test_hive_231
+ ,test_hive_232 as test_hive_232
+ ,test_hive_233 as test_hive_233
+ ,test_hive_234 as test_hive_234
+ ,test_hive_235 as test_hive_235
+ ,test_hive_236 as test_hive_236
+ ,test_hive_237 as test_hive_237
+ ,test_hive_238 as test_hive_238
+ ,test_hive_239 as test_hive_239
+ ,test_hive_1094 as test_hive_1094
+ ,test_hive_1096 as test_hive_1096
+ ,test_hive_1099 as test_hive_1099
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1105 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1103
+POSTHOOK: Input: default@test_hive_1105
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1104
+POSTHOOK: Lineage: test_hive_1104.creation_date EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.ds EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.ds_ts SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.source_file_name SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_1089 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1089, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_1090 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1090, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_10902 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_10902, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_1092 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1092, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_1093 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1093, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_1094 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1094, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_1095 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1095, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_1096 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1096, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_1097 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1097, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_1098 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1098, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_1099 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1099, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_214 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_214, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_215 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_215, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_216 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_216, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_217 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_217, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_218 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_218, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_219 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_219, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_220 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_220, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_221 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_221, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_222 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_222, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_223 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_223, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_224 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_224, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_225 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_225, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_226 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_226, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_227 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_227, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_228 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_228, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_229 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_229, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_230 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_230, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_231 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_231, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_232 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_232, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_233 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_233, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_234 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_234, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_235 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_235, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_236 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_236, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_237 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_237, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_238 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_238, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_239 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_239, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_240 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_240, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_241 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_241, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_242 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_242, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_243 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_243, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_244 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_244, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_245 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_245, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_246 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_246, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_247 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_247, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_248 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_248, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_249 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_249, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.test_hive_250 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_250, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1104.ts EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:ts, type:int, comment:null), ]
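[Editor's note] The POSTHOOK: Lineage records above are the column-lineage entries the qtest driver prints after each statement: SIMPLE marks a column copied straight through, while EXPRESSION marks a value derived through an expression (Hive's lineage model also defines a SCRIPT kind). A minimal sketch with hypothetical names showing which construct produces which kind:

-- Hypothetical illustration of lineage kinds:
create table src_t (col_a string, col_b string);
create view typed_v as
select col_a,                      -- straight copy  => SIMPLE lineage
       cast(col_b as int) as col_b -- derived value  => EXPRESSION lineage
from src_t;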
+PREHOOK: query: drop view if exists test_hive_1101
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1101
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1101
+as
+select t1.*
+from test_hive_1104 t1
+inner join test_hive_1102 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1102
+PREHOOK: Input: default@test_hive_1103
+PREHOOK: Input: default@test_hive_1104
+PREHOOK: Input: default@test_hive_1105
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1101
+POSTHOOK: query: create view test_hive_1101
+as
+select t1.*
+from test_hive_1104 t1
+inner join test_hive_1102 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1102
+POSTHOOK: Input: default@test_hive_1103
+POSTHOOK: Input: default@test_hive_1104
+POSTHOOK: Input: default@test_hive_1105
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1101
+POSTHOOK: Lineage: test_hive_1101.creation_date EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.ds EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.ds_ts SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.source_file_name SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_1089 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1089, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_1090 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1090, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_10902 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_10902, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_1092 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1092, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_1093 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1093, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_1094 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1094, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_1095 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1095, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_1096 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1096, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_1097 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1097, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_1098 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1098, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_1099 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_1099, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_214 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_214, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_215 EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_215, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_216 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_216, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_217 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_217, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_218 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_218, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_219 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_219, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_220 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_220, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_221 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_221, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_222 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_222, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_223 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_223, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_224 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_224, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_225 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_225, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_226 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_226, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_227 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_227, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_228 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_228, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_229 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_229, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_230 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_230, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_231 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_231, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_232 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_232, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_233 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_233, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_234 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_234, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_235 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_235, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_236 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_236, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_237 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_237, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_238 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_238, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_239 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_239, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_240 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_240, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_241 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_241, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_242 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_242, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_243 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_243, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_244 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_244, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_245 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_245, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_246 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_246, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_247 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_247, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_248 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_248, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_249 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_249, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.test_hive_250 SIMPLE [(test_hive_1103)test_hive_1103.FieldSchema(name:test_hive_250, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1101.ts EXPRESSION [(test_hive_1103)test_hive_1103.FieldSchema(name:ts, type:int, comment:null), ]
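[Editor's note] test_hive_1101 pins its parent view to the newest load by inner-joining on a single-row helper table (test_hive_1102) that holds the maximum ds_ts. The patch shows only the join, not how the helper is populated; a hypothetical refresh, assuming the helper is rebuilt before each read, might look like:

-- Hypothetical refresh of the one-row helper table; not part of this patch.
insert overwrite table test_hive_1102
select max(ds_ts) as max_partition
from test_hive_1105;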
+PREHOOK: query: drop table if exists test_hive_1250 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1250 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1250
+(
+ test_hive_1240 string
+ ,test_hive_1239 string
+ ,test_hive_1241 string
+ ,test_hive_300 string
+ ,test_hive_288 string
+ ,test_hive_294 string
+ ,test_hive_299 string
+ ,test_hive_297 string
+ ,test_hive_285 string
+ ,test_hive_292 string
+ ,test_hive_290 string
+ ,test_hive_291 string
+ ,test_hive_303 string
+ ,test_hive_304 string
+ ,test_hive_305 string
+ ,test_hive_286 string
+ ,test_hive_293 string
+ ,test_hive_287 string
+ ,test_hive_296 string
+ ,test_hive_295 string
+ ,test_hive_298 string
+ ,test_hive_282 string
+ ,test_hive_283 string
+ ,test_hive_284 string
+ ,test_hive_289 string
+ ,test_hive_302 string
+ ,test_hive_301 string
+ ,test_hive_281 string
+ ,test_hive_1233 string
+ ,test_hive_1234 string
+ ,test_hive_12342 string
+ ,test_hive_1236 string
+ ,test_hive_1237 string
+ ,test_hive_1238 string
+ ,test_hive_1243 string
+ ,test_hive_1243_lag string
+ ,test_hive_1242 string
+ ,test_hive_1232 string
+ ,test_hive_1243_bp string
+ ,test_hive_1243_lag_bp string
+ ,test_hive_1243_con string
+ ,test_hive_1243_lag_con string
+ ,test_hive_1249 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1250
+POSTHOOK: query: create table test_hive_1250
+(
+ test_hive_1240 string
+ ,test_hive_1239 string
+ ,test_hive_1241 string
+ ,test_hive_300 string
+ ,test_hive_288 string
+ ,test_hive_294 string
+ ,test_hive_299 string
+ ,test_hive_297 string
+ ,test_hive_285 string
+ ,test_hive_292 string
+ ,test_hive_290 string
+ ,test_hive_291 string
+ ,test_hive_303 string
+ ,test_hive_304 string
+ ,test_hive_305 string
+ ,test_hive_286 string
+ ,test_hive_293 string
+ ,test_hive_287 string
+ ,test_hive_296 string
+ ,test_hive_295 string
+ ,test_hive_298 string
+ ,test_hive_282 string
+ ,test_hive_283 string
+ ,test_hive_284 string
+ ,test_hive_289 string
+ ,test_hive_302 string
+ ,test_hive_301 string
+ ,test_hive_281 string
+ ,test_hive_1233 string
+ ,test_hive_1234 string
+ ,test_hive_12342 string
+ ,test_hive_1236 string
+ ,test_hive_1237 string
+ ,test_hive_1238 string
+ ,test_hive_1243 string
+ ,test_hive_1243_lag string
+ ,test_hive_1242 string
+ ,test_hive_1232 string
+ ,test_hive_1243_bp string
+ ,test_hive_1243_lag_bp string
+ ,test_hive_1243_con string
+ ,test_hive_1243_lag_con string
+ ,test_hive_1249 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1250
+PREHOOK: query: create table if not exists test_hive_1253
+(
+ test_hive_1240 string
+ ,test_hive_1239 string
+ ,test_hive_1241 string
+ ,test_hive_300 string
+ ,test_hive_288 string
+ ,test_hive_294 string
+ ,test_hive_299 string
+ ,test_hive_297 string
+ ,test_hive_285 string
+ ,test_hive_292 string
+ ,test_hive_290 string
+ ,test_hive_291 string
+ ,test_hive_303 string
+ ,test_hive_304 string
+ ,test_hive_305 string
+ ,test_hive_286 string
+ ,test_hive_293 string
+ ,test_hive_287 string
+ ,test_hive_296 string
+ ,test_hive_295 string
+ ,test_hive_298 string
+ ,test_hive_282 string
+ ,test_hive_283 string
+ ,test_hive_284 string
+ ,test_hive_289 string
+ ,test_hive_302 string
+ ,test_hive_301 string
+ ,test_hive_281 string
+ ,test_hive_1233 string
+ ,test_hive_1234 string
+ ,test_hive_12342 string
+ ,test_hive_1236 string
+ ,test_hive_1237 string
+ ,test_hive_1238 string
+ ,test_hive_1243 string
+ ,test_hive_1243_lag string
+ ,test_hive_1242 string
+ ,test_hive_1232 string
+ ,test_hive_1243_bp string
+ ,test_hive_1243_lag_bp string
+ ,test_hive_1243_con string
+ ,test_hive_1243_lag_con string
+ ,test_hive_1249 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1253
+POSTHOOK: query: create table if not exists test_hive_1253
+(
+ test_hive_1240 string
+ ,test_hive_1239 string
+ ,test_hive_1241 string
+ ,test_hive_300 string
+ ,test_hive_288 string
+ ,test_hive_294 string
+ ,test_hive_299 string
+ ,test_hive_297 string
+ ,test_hive_285 string
+ ,test_hive_292 string
+ ,test_hive_290 string
+ ,test_hive_291 string
+ ,test_hive_303 string
+ ,test_hive_304 string
+ ,test_hive_305 string
+ ,test_hive_286 string
+ ,test_hive_293 string
+ ,test_hive_287 string
+ ,test_hive_296 string
+ ,test_hive_295 string
+ ,test_hive_298 string
+ ,test_hive_282 string
+ ,test_hive_283 string
+ ,test_hive_284 string
+ ,test_hive_289 string
+ ,test_hive_302 string
+ ,test_hive_301 string
+ ,test_hive_281 string
+ ,test_hive_1233 string
+ ,test_hive_1234 string
+ ,test_hive_12342 string
+ ,test_hive_1236 string
+ ,test_hive_1237 string
+ ,test_hive_1238 string
+ ,test_hive_1243 string
+ ,test_hive_1243_lag string
+ ,test_hive_1242 string
+ ,test_hive_1232 string
+ ,test_hive_1243_bp string
+ ,test_hive_1243_lag_bp string
+ ,test_hive_1243_con string
+ ,test_hive_1243_lag_con string
+ ,test_hive_1249 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1253
+PREHOOK: query: drop table if exists test_hive_1252 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1252 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1252
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1252
+POSTHOOK: query: create table if not exists test_hive_1252
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1252
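[Editor's note] The pattern here pairs a delimited-text staging table partitioned by (ds, ts) with a Parquet target partitioned by ds only, where ts is demoted to an ordinary column and ds_ts carries the combined load identifier. Two serde details are easy to miss: 'serialization.null.format' = '' makes empty fields read back as NULL, and, as far as I can tell from LazySimpleSerDe's delimiter handling, a purely numeric delimiter string is parsed as a character code, so '31' denotes the ASCII unit separator (0x1F) rather than the characters "3" and "1". A minimal sketch with hypothetical names:

-- Hypothetical staging/target pair mirroring the DDL above.
create table stage_t (c1 string, c2 string)
partitioned by (ds int, ts int)
row format delimited fields terminated by '31'    -- numeric string: ASCII 0x1F
tblproperties ('serialization.null.format' = ''); -- empty field reads as NULL

create table target_t (c1 string, c2 string, ts int)
partitioned by (ds int)
stored as parquet;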
+PREHOOK: query: drop view if exists test_hive_1255
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1255
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1255
+as
+select
+ cast(test_hive_1240 as int) as test_hive_1240
+ ,cast(test_hive_1239 as int) as test_hive_1239
+ ,cast(test_hive_1241 as int) as test_hive_1241
+ ,cast(test_hive_300 as string) as test_hive_300
+ ,cast(test_hive_288 as string) as test_hive_288
+ ,cast(test_hive_294 as string) as test_hive_294
+ ,cast(test_hive_299 as string) as test_hive_299
+ ,cast(test_hive_297 as string) as test_hive_297
+ ,cast(test_hive_285 as string) as test_hive_285
+ ,cast(test_hive_292 as string) as test_hive_292
+ ,cast(test_hive_290 as string) as test_hive_290
+ ,cast(test_hive_291 as string) as test_hive_291
+ ,cast(test_hive_303 as string) as test_hive_303
+ ,cast(test_hive_304 as string) as test_hive_304
+ ,cast(test_hive_305 as string) as test_hive_305
+ ,cast(test_hive_286 as string) as test_hive_286
+ ,cast(test_hive_293 as string) as test_hive_293
+ ,cast(test_hive_287 as string) as test_hive_287
+ ,cast(test_hive_296 as string) as test_hive_296
+ ,cast(test_hive_295 as string) as test_hive_295
+ ,cast(test_hive_298 as string) as test_hive_298
+ ,cast(test_hive_282 as string) as test_hive_282
+ ,cast(test_hive_283 as string) as test_hive_283
+ ,cast(test_hive_284 as string) as test_hive_284
+ ,cast(test_hive_289 as string) as test_hive_289
+ ,cast(test_hive_302 as string) as test_hive_302
+ ,cast(test_hive_301 as string) as test_hive_301
+ ,cast(test_hive_281 as string) as test_hive_281
+ ,cast(from_unixtime(unix_timestamp(test_hive_1233,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1233
+ ,cast(test_hive_1234 as string) as test_hive_1234
+ ,cast(test_hive_12342 as string) as test_hive_12342
+ ,cast(test_hive_1236 as string) as test_hive_1236
+ ,cast(test_hive_1237 as string) as test_hive_1237
+ ,cast(test_hive_1238 as string) as test_hive_1238
+ ,cast(test_hive_1243 as double) as test_hive_1243
+ ,cast(test_hive_1243_lag as double) as test_hive_1243_lag
+ ,cast(test_hive_1242 as double) as test_hive_1242
+ ,cast(test_hive_1232 as double) as test_hive_1232
+ ,cast(test_hive_1243_bp as double) as test_hive_1243_bp
+ ,cast(test_hive_1243_lag_bp as double) as test_hive_1243_lag_bp
+ ,cast(test_hive_1243_con as double) as test_hive_1243_con
+ ,cast(test_hive_1243_lag_con as double) as test_hive_1243_lag_con
+ ,cast(from_unixtime(unix_timestamp(test_hive_1249,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1249
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1253
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1253
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1255
+POSTHOOK: query: create view if not exists test_hive_1255
+as
+select
+ cast(test_hive_1240 as int) as test_hive_1240
+ ,cast(test_hive_1239 as int) as test_hive_1239
+ ,cast(test_hive_1241 as int) as test_hive_1241
+ ,cast(test_hive_300 as string) as test_hive_300
+ ,cast(test_hive_288 as string) as test_hive_288
+ ,cast(test_hive_294 as string) as test_hive_294
+ ,cast(test_hive_299 as string) as test_hive_299
+ ,cast(test_hive_297 as string) as test_hive_297
+ ,cast(test_hive_285 as string) as test_hive_285
+ ,cast(test_hive_292 as string) as test_hive_292
+ ,cast(test_hive_290 as string) as test_hive_290
+ ,cast(test_hive_291 as string) as test_hive_291
+ ,cast(test_hive_303 as string) as test_hive_303
+ ,cast(test_hive_304 as string) as test_hive_304
+ ,cast(test_hive_305 as string) as test_hive_305
+ ,cast(test_hive_286 as string) as test_hive_286
+ ,cast(test_hive_293 as string) as test_hive_293
+ ,cast(test_hive_287 as string) as test_hive_287
+ ,cast(test_hive_296 as string) as test_hive_296
+ ,cast(test_hive_295 as string) as test_hive_295
+ ,cast(test_hive_298 as string) as test_hive_298
+ ,cast(test_hive_282 as string) as test_hive_282
+ ,cast(test_hive_283 as string) as test_hive_283
+ ,cast(test_hive_284 as string) as test_hive_284
+ ,cast(test_hive_289 as string) as test_hive_289
+ ,cast(test_hive_302 as string) as test_hive_302
+ ,cast(test_hive_301 as string) as test_hive_301
+ ,cast(test_hive_281 as string) as test_hive_281
+ ,cast(from_unixtime(unix_timestamp(test_hive_1233,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1233
+ ,cast(test_hive_1234 as string) as test_hive_1234
+ ,cast(test_hive_12342 as string) as test_hive_12342
+ ,cast(test_hive_1236 as string) as test_hive_1236
+ ,cast(test_hive_1237 as string) as test_hive_1237
+ ,cast(test_hive_1238 as string) as test_hive_1238
+ ,cast(test_hive_1243 as double) as test_hive_1243
+ ,cast(test_hive_1243_lag as double) as test_hive_1243_lag
+ ,cast(test_hive_1242 as double) as test_hive_1242
+ ,cast(test_hive_1232 as double) as test_hive_1232
+ ,cast(test_hive_1243_bp as double) as test_hive_1243_bp
+ ,cast(test_hive_1243_lag_bp as double) as test_hive_1243_lag_bp
+ ,cast(test_hive_1243_con as double) as test_hive_1243_con
+ ,cast(test_hive_1243_lag_con as double) as test_hive_1243_lag_con
+ ,cast(from_unixtime(unix_timestamp(test_hive_1249,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1249
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1253
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1253
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1255
+POSTHOOK: Lineage: test_hive_1255.creation_date EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.ds EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.ds_ts SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.source_file_name SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_1232 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1232, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_1233 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1233, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_1234 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1234, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_12342 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_12342, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_1236 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1236, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_1237 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1237, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_1238 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1238, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_1239 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1239, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_1240 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1240, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_1241 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1241, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_1242 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1242, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_1243 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1243, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_1243_bp EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1243_bp, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_1243_con EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1243_con, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_1243_lag EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1243_lag, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_1243_lag_bp EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1243_lag_bp, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_1243_lag_con EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1243_lag_con, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_1249 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1249, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_281 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_281, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_282 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_282, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_283 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_283, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_284 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_284, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_285 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_285, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_286 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_286, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_287 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_287, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_288 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_288, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_289 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_289, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_290 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_290, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_291 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_291, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_292 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_292, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_293 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_293, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_294 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_294, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_295 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_295, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_296 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_296, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_297 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_297, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_298 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_298, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_299 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_299, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_300 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_300, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_301 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_301, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_302 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_302, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_303 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_303, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_304 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_304, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.test_hive_305 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_305, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1255.ts EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:ts, type:int, comment:null), ]
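[Editor's note] Each statement block in this script is made re-runnable by pairing "drop ... if exists" with "create ... if not exists": a replay neither fails on a missing object nor on one that already exists, and because the drop runs first, the view is always rebuilt from the current definition. A minimal sketch of the pattern, with a hypothetical view name:

-- Re-runnable DDL pattern used throughout the script above.
drop view if exists v_example;       -- no error if the view is absent
create view if not exists v_example  -- no error if something recreated it first
as select 1 as c1;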
+PREHOOK: query: drop view if exists test_hive_1254
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1254
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1254
+as
+select
+ test_hive_1240 as test_hive_1240
+ ,test_hive_1239 as test_hive_1239
+ ,test_hive_1241 as test_hive_1241
+ ,test_hive_300 as test_hive_300
+ ,test_hive_288 as test_hive_288
+ ,test_hive_294 as test_hive_294
+ ,test_hive_299 as test_hive_299
+ ,test_hive_297 as test_hive_297
+ ,test_hive_285 as test_hive_285
+ ,test_hive_292 as test_hive_292
+ ,test_hive_290 as test_hive_290
+ ,test_hive_291 as test_hive_291
+ ,test_hive_303 as test_hive_303
+ ,test_hive_304 as test_hive_304
+ ,test_hive_305 as test_hive_305
+ ,test_hive_286 as test_hive_286
+ ,test_hive_293 as test_hive_293
+ ,test_hive_287 as test_hive_287
+ ,test_hive_296 as test_hive_296
+ ,test_hive_295 as test_hive_295
+ ,test_hive_298 as test_hive_298
+ ,test_hive_282 as test_hive_282
+ ,test_hive_283 as test_hive_283
+ ,test_hive_284 as test_hive_284
+ ,test_hive_289 as test_hive_289
+ ,test_hive_302 as test_hive_302
+ ,test_hive_301 as test_hive_301
+ ,test_hive_281 as test_hive_281
+ ,test_hive_1233 as test_hive_1233
+ ,test_hive_1234 as test_hive_1234
+ ,test_hive_12342 as test_hive_12342
+ ,test_hive_1236 as test_hive_1236
+ ,test_hive_1237 as test_hive_1237
+ ,test_hive_1238 as test_hive_1238
+ ,test_hive_1243 as test_hive_1243
+ ,test_hive_1243_lag as test_hive_1243_lag
+ ,test_hive_1242 as test_hive_1242
+ ,test_hive_1232 as test_hive_1232
+ ,test_hive_1243_bp as test_hive_1243_bp
+ ,test_hive_1243_lag_bp as test_hive_1243_lag_bp
+ ,test_hive_1243_con as test_hive_1243_con
+ ,test_hive_1243_lag_con as test_hive_1243_lag_con
+ ,test_hive_1249 as test_hive_1249
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1255 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1253
+PREHOOK: Input: default@test_hive_1255
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1254
+POSTHOOK: query: create view test_hive_1254
+as
+select
+ test_hive_1240 as test_hive_1240
+ ,test_hive_1239 as test_hive_1239
+ ,test_hive_1241 as test_hive_1241
+ ,test_hive_300 as test_hive_300
+ ,test_hive_288 as test_hive_288
+ ,test_hive_294 as test_hive_294
+ ,test_hive_299 as test_hive_299
+ ,test_hive_297 as test_hive_297
+ ,test_hive_285 as test_hive_285
+ ,test_hive_292 as test_hive_292
+ ,test_hive_290 as test_hive_290
+ ,test_hive_291 as test_hive_291
+ ,test_hive_303 as test_hive_303
+ ,test_hive_304 as test_hive_304
+ ,test_hive_305 as test_hive_305
+ ,test_hive_286 as test_hive_286
+ ,test_hive_293 as test_hive_293
+ ,test_hive_287 as test_hive_287
+ ,test_hive_296 as test_hive_296
+ ,test_hive_295 as test_hive_295
+ ,test_hive_298 as test_hive_298
+ ,test_hive_282 as test_hive_282
+ ,test_hive_283 as test_hive_283
+ ,test_hive_284 as test_hive_284
+ ,test_hive_289 as test_hive_289
+ ,test_hive_302 as test_hive_302
+ ,test_hive_301 as test_hive_301
+ ,test_hive_281 as test_hive_281
+ ,test_hive_1233 as test_hive_1233
+ ,test_hive_1234 as test_hive_1234
+ ,test_hive_12342 as test_hive_12342
+ ,test_hive_1236 as test_hive_1236
+ ,test_hive_1237 as test_hive_1237
+ ,test_hive_1238 as test_hive_1238
+ ,test_hive_1243 as test_hive_1243
+ ,test_hive_1243_lag as test_hive_1243_lag
+ ,test_hive_1242 as test_hive_1242
+ ,test_hive_1232 as test_hive_1232
+ ,test_hive_1243_bp as test_hive_1243_bp
+ ,test_hive_1243_lag_bp as test_hive_1243_lag_bp
+ ,test_hive_1243_con as test_hive_1243_con
+ ,test_hive_1243_lag_con as test_hive_1243_lag_con
+ ,test_hive_1249 as test_hive_1249
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1255 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1253
+POSTHOOK: Input: default@test_hive_1255
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1254
+POSTHOOK: Lineage: test_hive_1254.creation_date EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.ds EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.ds_ts SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.source_file_name SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_1232 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1232, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_1233 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1233, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_1234 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1234, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_12342 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_12342, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_1236 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1236, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_1237 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1237, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_1238 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1238, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_1239 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1239, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_1240 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1240, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_1241 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1241, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_1242 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1242, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_1243 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1243, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_1243_bp EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1243_bp, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_1243_con EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1243_con, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_1243_lag EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1243_lag, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_1243_lag_bp EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1243_lag_bp, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_1243_lag_con EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1243_lag_con, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_1249 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1249, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_281 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_281, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_282 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_282, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_283 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_283, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_284 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_284, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_285 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_285, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_286 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_286, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_287 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_287, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_288 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_288, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_289 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_289, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_290 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_290, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_291 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_291, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_292 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_292, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_293 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_293, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_294 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_294, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_295 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_295, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_296 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_296, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_297 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_297, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_298 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_298, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_299 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_299, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_300 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_300, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_301 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_301, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_302 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_302, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_303 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_303, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_304 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_304, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.test_hive_305 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_305, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1254.ts EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1251
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1251
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1251
+as
+select t1.*
+from test_hive_1254 t1
+inner join test_hive_1252 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1252
+PREHOOK: Input: default@test_hive_1253
+PREHOOK: Input: default@test_hive_1254
+PREHOOK: Input: default@test_hive_1255
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1251
+POSTHOOK: query: create view test_hive_1251
+as
+select t1.*
+from test_hive_1254 t1
+inner join test_hive_1252 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1252
+POSTHOOK: Input: default@test_hive_1253
+POSTHOOK: Input: default@test_hive_1254
+POSTHOOK: Input: default@test_hive_1255
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1251
+POSTHOOK: Lineage: test_hive_1251.creation_date EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.ds EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.ds_ts SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.source_file_name SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_1232 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1232, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_1233 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1233, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_1234 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1234, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_12342 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_12342, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_1236 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1236, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_1237 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1237, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_1238 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1238, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_1239 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1239, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_1240 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1240, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_1241 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1241, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_1242 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1242, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_1243 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1243, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_1243_bp EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1243_bp, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_1243_con EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1243_con, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_1243_lag EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1243_lag, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_1243_lag_bp EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1243_lag_bp, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_1243_lag_con EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1243_lag_con, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_1249 EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_1249, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_281 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_281, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_282 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_282, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_283 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_283, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_284 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_284, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_285 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_285, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_286 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_286, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_287 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_287, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_288 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_288, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_289 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_289, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_290 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_290, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_291 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_291, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_292 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_292, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_293 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_293, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_294 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_294, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_295 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_295, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_296 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_296, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_297 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_297, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_298 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_298, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_299 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_299, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_300 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_300, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_301 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_301, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_302 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_302, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_303 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_303, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_304 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_304, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.test_hive_305 SIMPLE [(test_hive_1253)test_hive_1253.FieldSchema(name:test_hive_305, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1251.ts EXPRESSION [(test_hive_1253)test_hive_1253.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1373 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1373 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1373
+(
+ test_hive_1370 string
+ ,test_hive_1367 string
+ ,test_hive_1371 string
+ ,test_hive_1366 string
+ ,test_hive_338 string
+ ,test_hive_338_txt string
+ ,test_hive_340 string
+ ,test_hive_345 string
+ ,test_hive_345_txt string
+ ,test_hive_347 string
+ ,test_hive_348 string
+ ,test_hive_370 string
+ ,test_hive_373 string
+ ,test_hive_357 string
+ ,test_hive_375 string
+ ,test_hive_359 string
+ ,test_hive_341 string
+ ,test_hive_1368 string
+ ,test_hive_1369 string
+ ,test_hive_367 string
+ ,test_hive_354 string
+ ,test_hive_360 string
+ ,test_hive_349 string
+ ,test_hive_368 string
+ ,test_hive_369 string
+ ,test_hive_355 string
+ ,test_hive_342 string
+ ,test_hive_372 string
+ ,test_hive_363 string
+ ,test_hive_351 string
+ ,test_hive_365 string
+ ,test_hive_352 string
+ ,test_hive_366 string
+ ,test_hive_353 string
+ ,test_hive_364 string
+ ,test_hive_1381 string
+ ,test_hive_358 string
+ ,test_hive_1379 string
+ ,test_hive_362 string
+ ,test_hive_1380 string
+ ,test_hive_361 string
+ ,test_hive_350 string
+ ,test_hive_374 string
+ ,test_hive_343 string
+ ,test_hive_343_txt string
+ ,test_hive_371 string
+ ,test_hive_356 string
+ ,test_hive_1372 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1373
+POSTHOOK: query: create table test_hive_1373
+(
+ test_hive_1370 string
+ ,test_hive_1367 string
+ ,test_hive_1371 string
+ ,test_hive_1366 string
+ ,test_hive_338 string
+ ,test_hive_338_txt string
+ ,test_hive_340 string
+ ,test_hive_345 string
+ ,test_hive_345_txt string
+ ,test_hive_347 string
+ ,test_hive_348 string
+ ,test_hive_370 string
+ ,test_hive_373 string
+ ,test_hive_357 string
+ ,test_hive_375 string
+ ,test_hive_359 string
+ ,test_hive_341 string
+ ,test_hive_1368 string
+ ,test_hive_1369 string
+ ,test_hive_367 string
+ ,test_hive_354 string
+ ,test_hive_360 string
+ ,test_hive_349 string
+ ,test_hive_368 string
+ ,test_hive_369 string
+ ,test_hive_355 string
+ ,test_hive_342 string
+ ,test_hive_372 string
+ ,test_hive_363 string
+ ,test_hive_351 string
+ ,test_hive_365 string
+ ,test_hive_352 string
+ ,test_hive_366 string
+ ,test_hive_353 string
+ ,test_hive_364 string
+ ,test_hive_1381 string
+ ,test_hive_358 string
+ ,test_hive_1379 string
+ ,test_hive_362 string
+ ,test_hive_1380 string
+ ,test_hive_361 string
+ ,test_hive_350 string
+ ,test_hive_374 string
+ ,test_hive_343 string
+ ,test_hive_343_txt string
+ ,test_hive_371
string + ,test_hive_356 string + ,test_hive_1372 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1373 +PREHOOK: query: create table if not exists test_hive_1376 +( + test_hive_1370 string + ,test_hive_1367 string + ,test_hive_1371 string + ,test_hive_1366 string + ,test_hive_338 string + ,test_hive_338_txt string + ,test_hive_340 string + ,test_hive_345 string + ,test_hive_345_txt string + ,test_hive_347 string + ,test_hive_348 string + ,test_hive_370 string + ,test_hive_373 string + ,test_hive_357 string + ,test_hive_375 string + ,test_hive_359 string + ,test_hive_341 string + ,test_hive_1368 string + ,test_hive_1369 string + ,test_hive_367 string + ,test_hive_354 string + ,test_hive_360 string + ,test_hive_349 string + ,test_hive_368 string + ,test_hive_369 string + ,test_hive_355 string + ,test_hive_342 string + ,test_hive_372 string + ,test_hive_363 string + ,test_hive_351 string + ,test_hive_365 string + ,test_hive_352 string + ,test_hive_366 string + ,test_hive_353 string + ,test_hive_364 string + ,test_hive_1381 string + ,test_hive_358 string + ,test_hive_1379 string + ,test_hive_362 string + ,test_hive_1380 string + ,test_hive_361 string + ,test_hive_350 string + ,test_hive_374 string + ,test_hive_343 string + ,test_hive_343_txt string + ,test_hive_371 string + ,test_hive_356 string + ,test_hive_1372 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1376 +POSTHOOK: query: create table if not exists test_hive_1376 +( + test_hive_1370 string + ,test_hive_1367 string + ,test_hive_1371 string + ,test_hive_1366 string + ,test_hive_338 string + ,test_hive_338_txt string + ,test_hive_340 string + ,test_hive_345 string + ,test_hive_345_txt string + ,test_hive_347 string + ,test_hive_348 string + ,test_hive_370 string + ,test_hive_373 string + ,test_hive_357 string + ,test_hive_375 string + ,test_hive_359 string + ,test_hive_341 string + ,test_hive_1368 string + ,test_hive_1369 string + ,test_hive_367 string + ,test_hive_354 string + ,test_hive_360 string + ,test_hive_349 string + ,test_hive_368 string + ,test_hive_369 string + ,test_hive_355 string + ,test_hive_342 string + ,test_hive_372 string + ,test_hive_363 string + ,test_hive_351 string + ,test_hive_365 string + ,test_hive_352 string + ,test_hive_366 string + ,test_hive_353 string + ,test_hive_364 string + ,test_hive_1381 string + ,test_hive_358 string + ,test_hive_1379 string + ,test_hive_362 string + ,test_hive_1380 string + ,test_hive_361 string + ,test_hive_350 string + ,test_hive_374 string + ,test_hive_343 string + ,test_hive_343_txt string + ,test_hive_371 string + ,test_hive_356 string + ,test_hive_1372 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1376 +PREHOOK: query: drop table if exists test_hive_1375 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1375 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1375 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default 
+PREHOOK: Output: default@test_hive_1375 +POSTHOOK: query: create table if not exists test_hive_1375 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1375 +PREHOOK: query: drop view if exists test_hive_1378 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1378 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1378 +as +select + cast(test_hive_1370 as int) as test_hive_1370 + ,cast(test_hive_1367 as int) as test_hive_1367 + ,cast(test_hive_1371 as int) as test_hive_1371 + ,cast(test_hive_1366 as string) as test_hive_1366 + ,cast(test_hive_338 as string) as test_hive_338 + ,cast(test_hive_338_txt as string) as test_hive_338_txt + ,cast(test_hive_340 as string) as test_hive_340 + ,cast(test_hive_345 as string) as test_hive_345 + ,cast(test_hive_345_txt as string) as test_hive_345_txt + ,cast(test_hive_347 as string) as test_hive_347 + ,cast(test_hive_348 as string) as test_hive_348 + ,cast(test_hive_370 as string) as test_hive_370 + ,cast(test_hive_373 as string) as test_hive_373 + ,cast(test_hive_357 as string) as test_hive_357 + ,cast(test_hive_375 as string) as test_hive_375 + ,cast(test_hive_359 as string) as test_hive_359 + ,cast(test_hive_341 as string) as test_hive_341 + ,cast(test_hive_1368 as int) as test_hive_1368 + ,cast(test_hive_1369 as int) as test_hive_1369 + ,cast(test_hive_367 as string) as test_hive_367 + ,cast(test_hive_354 as string) as test_hive_354 + ,cast(test_hive_360 as string) as test_hive_360 + ,cast(test_hive_349 as string) as test_hive_349 + ,cast(test_hive_368 as string) as test_hive_368 + ,cast(test_hive_369 as string) as test_hive_369 + ,cast(test_hive_355 as string) as test_hive_355 + ,cast(from_unixtime(unix_timestamp(test_hive_342,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_342 + ,cast(test_hive_372 as string) as test_hive_372 + ,cast(test_hive_363 as string) as test_hive_363 + ,cast(test_hive_351 as string) as test_hive_351 + ,cast(test_hive_365 as string) as test_hive_365 + ,cast(test_hive_352 as string) as test_hive_352 + ,cast(test_hive_366 as string) as test_hive_366 + ,cast(test_hive_353 as string) as test_hive_353 + ,cast(test_hive_364 as string) as test_hive_364 + ,cast(test_hive_1381 as string) as test_hive_1381 + ,cast(test_hive_358 as string) as test_hive_358 + ,cast(test_hive_1379 as string) as test_hive_1379 + ,cast(test_hive_362 as string) as test_hive_362 + ,cast(test_hive_1380 as string) as test_hive_1380 + ,cast(test_hive_361 as string) as test_hive_361 + ,cast(test_hive_350 as string) as test_hive_350 + ,cast(test_hive_374 as string) as test_hive_374 + ,cast(test_hive_343 as string) as test_hive_343 + ,cast(test_hive_343_txt as string) as test_hive_343_txt + ,cast(test_hive_371 as string) as test_hive_371 + ,cast(test_hive_356 as string) as test_hive_356 + ,cast(from_unixtime(unix_timestamp(test_hive_1372,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1372 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1376 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1376 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1378 +POSTHOOK: query: create view if not exists test_hive_1378 +as +select + cast(test_hive_1370 as int) as test_hive_1370 + ,cast(test_hive_1367 as int) as test_hive_1367 + ,cast(test_hive_1371 as int) as test_hive_1371 + 
,cast(test_hive_1366 as string) as test_hive_1366 + ,cast(test_hive_338 as string) as test_hive_338 + ,cast(test_hive_338_txt as string) as test_hive_338_txt + ,cast(test_hive_340 as string) as test_hive_340 + ,cast(test_hive_345 as string) as test_hive_345 + ,cast(test_hive_345_txt as string) as test_hive_345_txt + ,cast(test_hive_347 as string) as test_hive_347 + ,cast(test_hive_348 as string) as test_hive_348 + ,cast(test_hive_370 as string) as test_hive_370 + ,cast(test_hive_373 as string) as test_hive_373 + ,cast(test_hive_357 as string) as test_hive_357 + ,cast(test_hive_375 as string) as test_hive_375 + ,cast(test_hive_359 as string) as test_hive_359 + ,cast(test_hive_341 as string) as test_hive_341 + ,cast(test_hive_1368 as int) as test_hive_1368 + ,cast(test_hive_1369 as int) as test_hive_1369 + ,cast(test_hive_367 as string) as test_hive_367 + ,cast(test_hive_354 as string) as test_hive_354 + ,cast(test_hive_360 as string) as test_hive_360 + ,cast(test_hive_349 as string) as test_hive_349 + ,cast(test_hive_368 as string) as test_hive_368 + ,cast(test_hive_369 as string) as test_hive_369 + ,cast(test_hive_355 as string) as test_hive_355 + ,cast(from_unixtime(unix_timestamp(test_hive_342,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_342 + ,cast(test_hive_372 as string) as test_hive_372 + ,cast(test_hive_363 as string) as test_hive_363 + ,cast(test_hive_351 as string) as test_hive_351 + ,cast(test_hive_365 as string) as test_hive_365 + ,cast(test_hive_352 as string) as test_hive_352 + ,cast(test_hive_366 as string) as test_hive_366 + ,cast(test_hive_353 as string) as test_hive_353 + ,cast(test_hive_364 as string) as test_hive_364 + ,cast(test_hive_1381 as string) as test_hive_1381 + ,cast(test_hive_358 as string) as test_hive_358 + ,cast(test_hive_1379 as string) as test_hive_1379 + ,cast(test_hive_362 as string) as test_hive_362 + ,cast(test_hive_1380 as string) as test_hive_1380 + ,cast(test_hive_361 as string) as test_hive_361 + ,cast(test_hive_350 as string) as test_hive_350 + ,cast(test_hive_374 as string) as test_hive_374 + ,cast(test_hive_343 as string) as test_hive_343 + ,cast(test_hive_343_txt as string) as test_hive_343_txt + ,cast(test_hive_371 as string) as test_hive_371 + ,cast(test_hive_356 as string) as test_hive_356 + ,cast(from_unixtime(unix_timestamp(test_hive_1372,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1372 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1376 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1376 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1378 +POSTHOOK: Lineage: test_hive_1378.creation_date EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.ds EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.ds_ts SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.source_file_name SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_1366 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1366, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_1367 EXPRESSION 
[(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1367, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_1368 EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1368, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_1369 EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1369, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_1370 EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1370, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_1371 EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1371, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_1372 EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1372, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_1379 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1379, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_1380 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1380, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_1381 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1381, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_338 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_338, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_338_txt SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_338_txt, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_340 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_340, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_341 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_341, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_342 EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_342, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_343 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_343, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_343_txt SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_343_txt, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_345 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_345, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_345_txt SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_345_txt, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_347 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_347, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_348 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_348, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_349 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_349, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_350 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_350, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_351 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_351, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_352 SIMPLE 
[(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_352, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_353 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_353, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_354 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_354, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_355 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_355, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_356 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_356, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_357 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_357, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_358 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_358, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_359 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_359, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_360 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_360, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_361 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_361, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_362 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_362, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_363 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_363, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_364 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_364, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_365 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_365, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_366 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_366, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_367 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_367, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_368 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_368, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_369 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_369, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_370 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_370, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_371 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_371, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_372 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_372, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_373 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_373, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_374 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_374, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1378.test_hive_375 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_375, type:string, comment:null), ] +POSTHOOK: 
Lineage: test_hive_1378.ts EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1377 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1377 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1377 +as +select + test_hive_1370 as test_hive_1370 + ,test_hive_1367 as test_hive_1367 + ,test_hive_1371 as test_hive_1371 + ,test_hive_1366 as test_hive_1366 + ,test_hive_338 as test_hive_338 + ,test_hive_338_txt as test_hive_338_txt + ,test_hive_340 as test_hive_340 + ,test_hive_345 as test_hive_345 + ,test_hive_345_txt as test_hive_345_txt + ,test_hive_347 as test_hive_347 + ,test_hive_348 as test_hive_348 + ,test_hive_370 as test_hive_370 + ,test_hive_373 as test_hive_373 + ,test_hive_357 as test_hive_357 + ,test_hive_375 as test_hive_375 + ,test_hive_359 as test_hive_359 + ,test_hive_341 as test_hive_341 + ,test_hive_1368 as test_hive_1368 + ,test_hive_1369 as test_hive_1369 + ,test_hive_367 as test_hive_367 + ,test_hive_354 as test_hive_354 + ,test_hive_360 as test_hive_360 + ,test_hive_349 as test_hive_349 + ,test_hive_368 as test_hive_368 + ,test_hive_369 as test_hive_369 + ,test_hive_355 as test_hive_355 + ,test_hive_342 as test_hive_342 + ,test_hive_372 as test_hive_372 + ,test_hive_363 as test_hive_363 + ,test_hive_351 as test_hive_351 + ,test_hive_365 as test_hive_365 + ,test_hive_352 as test_hive_352 + ,test_hive_366 as test_hive_366 + ,test_hive_353 as test_hive_353 + ,test_hive_364 as test_hive_364 + ,test_hive_1381 as test_hive_1381 + ,test_hive_358 as test_hive_358 + ,test_hive_1379 as test_hive_1379 + ,test_hive_362 as test_hive_362 + ,test_hive_1380 as test_hive_1380 + ,test_hive_361 as test_hive_361 + ,test_hive_350 as test_hive_350 + ,test_hive_374 as test_hive_374 + ,test_hive_343 as test_hive_343 + ,test_hive_343_txt as test_hive_343_txt + ,test_hive_371 as test_hive_371 + ,test_hive_356 as test_hive_356 + ,test_hive_1372 as test_hive_1372 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1378 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1376 +PREHOOK: Input: default@test_hive_1378 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1377 +POSTHOOK: query: create view test_hive_1377 +as +select + test_hive_1370 as test_hive_1370 + ,test_hive_1367 as test_hive_1367 + ,test_hive_1371 as test_hive_1371 + ,test_hive_1366 as test_hive_1366 + ,test_hive_338 as test_hive_338 + ,test_hive_338_txt as test_hive_338_txt + ,test_hive_340 as test_hive_340 + ,test_hive_345 as test_hive_345 + ,test_hive_345_txt as test_hive_345_txt + ,test_hive_347 as test_hive_347 + ,test_hive_348 as test_hive_348 + ,test_hive_370 as test_hive_370 + ,test_hive_373 as test_hive_373 + ,test_hive_357 as test_hive_357 + ,test_hive_375 as test_hive_375 + ,test_hive_359 as test_hive_359 + ,test_hive_341 as test_hive_341 + ,test_hive_1368 as test_hive_1368 + ,test_hive_1369 as test_hive_1369 + ,test_hive_367 as test_hive_367 + ,test_hive_354 as test_hive_354 + ,test_hive_360 as test_hive_360 + ,test_hive_349 as test_hive_349 + ,test_hive_368 as test_hive_368 + ,test_hive_369 as test_hive_369 + ,test_hive_355 as test_hive_355 + ,test_hive_342 as test_hive_342 + ,test_hive_372 as test_hive_372 + ,test_hive_363 as test_hive_363 + ,test_hive_351 as test_hive_351 + ,test_hive_365 as test_hive_365 + ,test_hive_352 as test_hive_352 + ,test_hive_366 as test_hive_366 + ,test_hive_353 as test_hive_353 + ,test_hive_364 as test_hive_364 + 
,test_hive_1381 as test_hive_1381 + ,test_hive_358 as test_hive_358 + ,test_hive_1379 as test_hive_1379 + ,test_hive_362 as test_hive_362 + ,test_hive_1380 as test_hive_1380 + ,test_hive_361 as test_hive_361 + ,test_hive_350 as test_hive_350 + ,test_hive_374 as test_hive_374 + ,test_hive_343 as test_hive_343 + ,test_hive_343_txt as test_hive_343_txt + ,test_hive_371 as test_hive_371 + ,test_hive_356 as test_hive_356 + ,test_hive_1372 as test_hive_1372 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1378 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1376 +POSTHOOK: Input: default@test_hive_1378 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1377 +POSTHOOK: Lineage: test_hive_1377.creation_date EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.ds EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.ds_ts SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.source_file_name SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_1366 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1366, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_1367 EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1367, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_1368 EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1368, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_1369 EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1369, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_1370 EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1370, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_1371 EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1371, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_1372 EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1372, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_1379 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1379, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_1380 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1380, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_1381 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1381, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_338 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_338, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_338_txt SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_338_txt, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_340 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_340, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_341 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_341, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_342 EXPRESSION 
[(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_342, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_343 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_343, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_343_txt SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_343_txt, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_345 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_345, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_345_txt SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_345_txt, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_347 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_347, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_348 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_348, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_349 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_349, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_350 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_350, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_351 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_351, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_352 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_352, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_353 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_353, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_354 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_354, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_355 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_355, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_356 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_356, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_357 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_357, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_358 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_358, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_359 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_359, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_360 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_360, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_361 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_361, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_362 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_362, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_363 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_363, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_364 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_364, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_365 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_365, type:string, 
comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_366 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_366, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_367 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_367, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_368 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_368, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_369 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_369, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_370 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_370, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_371 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_371, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_372 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_372, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_373 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_373, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_374 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_374, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.test_hive_375 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_375, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1377.ts EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1374 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1374 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1374 +as +select t1.* +from test_hive_1377 t1 +inner join test_hive_1375 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1375 +PREHOOK: Input: default@test_hive_1376 +PREHOOK: Input: default@test_hive_1377 +PREHOOK: Input: default@test_hive_1378 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1374 +POSTHOOK: query: create view test_hive_1374 +as +select t1.* +from test_hive_1377 t1 +inner join test_hive_1375 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1375 +POSTHOOK: Input: default@test_hive_1376 +POSTHOOK: Input: default@test_hive_1377 +POSTHOOK: Input: default@test_hive_1378 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1374 +POSTHOOK: Lineage: test_hive_1374.creation_date EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.ds EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.ds_ts SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.source_file_name SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_1366 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1366, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_1367 EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1367, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1374.test_hive_1368 EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1368, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_1369 EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1369, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_1370 EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1370, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_1371 EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1371, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_1372 EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1372, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_1379 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1379, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_1380 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1380, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_1381 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_1381, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_338 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_338, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_338_txt SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_338_txt, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_340 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_340, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_341 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_341, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_342 EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_342, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_343 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_343, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_343_txt SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_343_txt, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_345 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_345, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_345_txt SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_345_txt, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_347 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_347, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_348 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_348, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_349 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_349, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_350 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_350, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_351 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_351, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_352 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_352, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_353 SIMPLE 
[(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_353, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_354 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_354, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_355 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_355, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_356 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_356, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_357 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_357, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_358 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_358, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_359 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_359, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_360 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_360, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_361 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_361, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_362 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_362, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_363 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_363, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_364 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_364, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_365 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_365, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_366 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_366, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_367 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_367, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_368 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_368, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_369 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_369, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_370 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_370, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_371 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_371, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_372 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_372, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_373 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_373, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_374 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_374, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.test_hive_375 SIMPLE [(test_hive_1376)test_hive_1376.FieldSchema(name:test_hive_375, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1374.ts EXPRESSION [(test_hive_1376)test_hive_1376.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if 
exists test_hive_1397 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1397 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1397 +( + test_hive_1394 string + ,test_hive_1383 string + ,test_hive_1395 string + ,test_hive_1382 string + ,test_hive_407 string + ,test_hive_397 string + ,test_hive_379 string + ,test_hive_408 string + ,test_hive_1384 string + ,test_hive_1385 string + ,test_hive_1386 string + ,test_hive_1387 string + ,test_hive_1388 string + ,test_hive_1389 string + ,test_hive_1390 string + ,test_hive_1391 string + ,test_hive_1392 string + ,test_hive_1393 string + ,test_hive_400 string + ,test_hive_386 string + ,test_hive_409 string + ,test_hive_390 string + ,test_hive_381 string + ,test_hive_380 string + ,test_hive_382 string + ,test_hive_382_txt string + ,test_hive_410 string + ,test_hive_391 string + ,test_hive_403 string + ,test_hive_388 string + ,test_hive_405 string + ,test_hive_389 string + ,test_hive_393 string + ,test_hive_376 string + ,test_hive_394 string + ,test_hive_377 string + ,test_hive_395 string + ,test_hive_378 string + ,test_hive_406 string + ,test_hive_1406 string + ,test_hive_404 string + ,test_hive_1405 string + ,test_hive_396 string + ,test_hive_1403 string + ,test_hive_402 string + ,test_hive_1404 string + ,test_hive_398 string + ,test_hive_384 string + ,test_hive_399 string + ,test_hive_385 string + ,test_hive_411 string + ,test_hive_392 string + ,test_hive_401 string + ,test_hive_387 string + ,test_hive_1396 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1397 +POSTHOOK: query: create table test_hive_1397 +( + test_hive_1394 string + ,test_hive_1383 string + ,test_hive_1395 string + ,test_hive_1382 string + ,test_hive_407 string + ,test_hive_397 string + ,test_hive_379 string + ,test_hive_408 string + ,test_hive_1384 string + ,test_hive_1385 string + ,test_hive_1386 string + ,test_hive_1387 string + ,test_hive_1388 string + ,test_hive_1389 string + ,test_hive_1390 string + ,test_hive_1391 string + ,test_hive_1392 string + ,test_hive_1393 string + ,test_hive_400 string + ,test_hive_386 string + ,test_hive_409 string + ,test_hive_390 string + ,test_hive_381 string + ,test_hive_380 string + ,test_hive_382 string + ,test_hive_382_txt string + ,test_hive_410 string + ,test_hive_391 string + ,test_hive_403 string + ,test_hive_388 string + ,test_hive_405 string + ,test_hive_389 string + ,test_hive_393 string + ,test_hive_376 string + ,test_hive_394 string + ,test_hive_377 string + ,test_hive_395 string + ,test_hive_378 string + ,test_hive_406 string + ,test_hive_1406 string + ,test_hive_404 string + ,test_hive_1405 string + ,test_hive_396 string + ,test_hive_1403 string + ,test_hive_402 string + ,test_hive_1404 string + ,test_hive_398 string + ,test_hive_384 string + ,test_hive_399 string + ,test_hive_385 string + ,test_hive_411 string + ,test_hive_392 string + ,test_hive_401 string + ,test_hive_387 string + ,test_hive_1396 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1397 +PREHOOK: query: create table if not exists test_hive_1400 +( + test_hive_1394 string + ,test_hive_1383 string + ,test_hive_1395 string + ,test_hive_1382 
string + ,test_hive_407 string + ,test_hive_397 string + ,test_hive_379 string + ,test_hive_408 string + ,test_hive_1384 string + ,test_hive_1385 string + ,test_hive_1386 string + ,test_hive_1387 string + ,test_hive_1388 string + ,test_hive_1389 string + ,test_hive_1390 string + ,test_hive_1391 string + ,test_hive_1392 string + ,test_hive_1393 string + ,test_hive_400 string + ,test_hive_386 string + ,test_hive_409 string + ,test_hive_390 string + ,test_hive_381 string + ,test_hive_380 string + ,test_hive_382 string + ,test_hive_382_txt string + ,test_hive_410 string + ,test_hive_391 string + ,test_hive_403 string + ,test_hive_388 string + ,test_hive_405 string + ,test_hive_389 string + ,test_hive_393 string + ,test_hive_376 string + ,test_hive_394 string + ,test_hive_377 string + ,test_hive_395 string + ,test_hive_378 string + ,test_hive_406 string + ,test_hive_1406 string + ,test_hive_404 string + ,test_hive_1405 string + ,test_hive_396 string + ,test_hive_1403 string + ,test_hive_402 string + ,test_hive_1404 string + ,test_hive_398 string + ,test_hive_384 string + ,test_hive_399 string + ,test_hive_385 string + ,test_hive_411 string + ,test_hive_392 string + ,test_hive_401 string + ,test_hive_387 string + ,test_hive_1396 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1400 +POSTHOOK: query: create table if not exists test_hive_1400 +( + test_hive_1394 string + ,test_hive_1383 string + ,test_hive_1395 string + ,test_hive_1382 string + ,test_hive_407 string + ,test_hive_397 string + ,test_hive_379 string + ,test_hive_408 string + ,test_hive_1384 string + ,test_hive_1385 string + ,test_hive_1386 string + ,test_hive_1387 string + ,test_hive_1388 string + ,test_hive_1389 string + ,test_hive_1390 string + ,test_hive_1391 string + ,test_hive_1392 string + ,test_hive_1393 string + ,test_hive_400 string + ,test_hive_386 string + ,test_hive_409 string + ,test_hive_390 string + ,test_hive_381 string + ,test_hive_380 string + ,test_hive_382 string + ,test_hive_382_txt string + ,test_hive_410 string + ,test_hive_391 string + ,test_hive_403 string + ,test_hive_388 string + ,test_hive_405 string + ,test_hive_389 string + ,test_hive_393 string + ,test_hive_376 string + ,test_hive_394 string + ,test_hive_377 string + ,test_hive_395 string + ,test_hive_378 string + ,test_hive_406 string + ,test_hive_1406 string + ,test_hive_404 string + ,test_hive_1405 string + ,test_hive_396 string + ,test_hive_1403 string + ,test_hive_402 string + ,test_hive_1404 string + ,test_hive_398 string + ,test_hive_384 string + ,test_hive_399 string + ,test_hive_385 string + ,test_hive_411 string + ,test_hive_392 string + ,test_hive_401 string + ,test_hive_387 string + ,test_hive_1396 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1400 +PREHOOK: query: drop table if exists test_hive_1399 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1399 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1399 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1399 +POSTHOOK: query: create table if not exists test_hive_1399 +( +max_partition 
bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1399 +PREHOOK: query: drop view if exists test_hive_1402 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1402 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1402 +as +select + cast(test_hive_1394 as int) as test_hive_1394 + ,cast(test_hive_1383 as int) as test_hive_1383 + ,cast(test_hive_1395 as int) as test_hive_1395 + ,cast(test_hive_1382 as string) as test_hive_1382 + ,cast(test_hive_407 as string) as test_hive_407 + ,cast(test_hive_397 as string) as test_hive_397 + ,cast(test_hive_379 as string) as test_hive_379 + ,cast(test_hive_408 as string) as test_hive_408 + ,cast(test_hive_1384 as int) as test_hive_1384 + ,cast(test_hive_1385 as int) as test_hive_1385 + ,cast(test_hive_1386 as int) as test_hive_1386 + ,cast(test_hive_1387 as int) as test_hive_1387 + ,cast(test_hive_1388 as int) as test_hive_1388 + ,cast(test_hive_1389 as double) as test_hive_1389 + ,cast(test_hive_1390 as double) as test_hive_1390 + ,cast(test_hive_1391 as int) as test_hive_1391 + ,cast(test_hive_1392 as int) as test_hive_1392 + ,cast(test_hive_1393 as int) as test_hive_1393 + ,cast(test_hive_400 as string) as test_hive_400 + ,cast(test_hive_386 as string) as test_hive_386 + ,cast(test_hive_409 as string) as test_hive_409 + ,cast(test_hive_390 as string) as test_hive_390 + ,cast(from_unixtime(unix_timestamp(test_hive_381,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_381 + ,cast(test_hive_380 as string) as test_hive_380 + ,cast(test_hive_382 as string) as test_hive_382 + ,cast(test_hive_382_txt as string) as test_hive_382_txt + ,cast(test_hive_410 as string) as test_hive_410 + ,cast(test_hive_391 as string) as test_hive_391 + ,cast(test_hive_403 as string) as test_hive_403 + ,cast(test_hive_388 as string) as test_hive_388 + ,cast(test_hive_405 as string) as test_hive_405 + ,cast(test_hive_389 as string) as test_hive_389 + ,cast(test_hive_393 as string) as test_hive_393 + ,cast(test_hive_376 as string) as test_hive_376 + ,cast(test_hive_394 as string) as test_hive_394 + ,cast(test_hive_377 as string) as test_hive_377 + ,cast(test_hive_395 as string) as test_hive_395 + ,cast(test_hive_378 as string) as test_hive_378 + ,cast(test_hive_406 as string) as test_hive_406 + ,cast(test_hive_1406 as string) as test_hive_1406 + ,cast(test_hive_404 as string) as test_hive_404 + ,cast(test_hive_1405 as string) as test_hive_1405 + ,cast(test_hive_396 as string) as test_hive_396 + ,cast(test_hive_1403 as string) as test_hive_1403 + ,cast(test_hive_402 as string) as test_hive_402 + ,cast(test_hive_1404 as string) as test_hive_1404 + ,cast(test_hive_398 as string) as test_hive_398 + ,cast(test_hive_384 as string) as test_hive_384 + ,cast(test_hive_399 as string) as test_hive_399 + ,cast(test_hive_385 as string) as test_hive_385 + ,cast(test_hive_411 as string) as test_hive_411 + ,cast(test_hive_392 as string) as test_hive_392 + ,cast(test_hive_401 as string) as test_hive_401 + ,cast(test_hive_387 as string) as test_hive_387 + ,cast(from_unixtime(unix_timestamp(test_hive_1396,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1396 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1400 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1400 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1402 +POSTHOOK: 
query: create view if not exists test_hive_1402
+as
+select
+ cast(test_hive_1394 as int) as test_hive_1394
+ ,cast(test_hive_1383 as int) as test_hive_1383
+ ,cast(test_hive_1395 as int) as test_hive_1395
+ ,cast(test_hive_1382 as string) as test_hive_1382
+ ,cast(test_hive_407 as string) as test_hive_407
+ ,cast(test_hive_397 as string) as test_hive_397
+ ,cast(test_hive_379 as string) as test_hive_379
+ ,cast(test_hive_408 as string) as test_hive_408
+ ,cast(test_hive_1384 as int) as test_hive_1384
+ ,cast(test_hive_1385 as int) as test_hive_1385
+ ,cast(test_hive_1386 as int) as test_hive_1386
+ ,cast(test_hive_1387 as int) as test_hive_1387
+ ,cast(test_hive_1388 as int) as test_hive_1388
+ ,cast(test_hive_1389 as double) as test_hive_1389
+ ,cast(test_hive_1390 as double) as test_hive_1390
+ ,cast(test_hive_1391 as int) as test_hive_1391
+ ,cast(test_hive_1392 as int) as test_hive_1392
+ ,cast(test_hive_1393 as int) as test_hive_1393
+ ,cast(test_hive_400 as string) as test_hive_400
+ ,cast(test_hive_386 as string) as test_hive_386
+ ,cast(test_hive_409 as string) as test_hive_409
+ ,cast(test_hive_390 as string) as test_hive_390
+ ,cast(from_unixtime(unix_timestamp(test_hive_381,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_381
+ ,cast(test_hive_380 as string) as test_hive_380
+ ,cast(test_hive_382 as string) as test_hive_382
+ ,cast(test_hive_382_txt as string) as test_hive_382_txt
+ ,cast(test_hive_410 as string) as test_hive_410
+ ,cast(test_hive_391 as string) as test_hive_391
+ ,cast(test_hive_403 as string) as test_hive_403
+ ,cast(test_hive_388 as string) as test_hive_388
+ ,cast(test_hive_405 as string) as test_hive_405
+ ,cast(test_hive_389 as string) as test_hive_389
+ ,cast(test_hive_393 as string) as test_hive_393
+ ,cast(test_hive_376 as string) as test_hive_376
+ ,cast(test_hive_394 as string) as test_hive_394
+ ,cast(test_hive_377 as string) as test_hive_377
+ ,cast(test_hive_395 as string) as test_hive_395
+ ,cast(test_hive_378 as string) as test_hive_378
+ ,cast(test_hive_406 as string) as test_hive_406
+ ,cast(test_hive_1406 as string) as test_hive_1406
+ ,cast(test_hive_404 as string) as test_hive_404
+ ,cast(test_hive_1405 as string) as test_hive_1405
+ ,cast(test_hive_396 as string) as test_hive_396
+ ,cast(test_hive_1403 as string) as test_hive_1403
+ ,cast(test_hive_402 as string) as test_hive_402
+ ,cast(test_hive_1404 as string) as test_hive_1404
+ ,cast(test_hive_398 as string) as test_hive_398
+ ,cast(test_hive_384 as string) as test_hive_384
+ ,cast(test_hive_399 as string) as test_hive_399
+ ,cast(test_hive_385 as string) as test_hive_385
+ ,cast(test_hive_411 as string) as test_hive_411
+ ,cast(test_hive_392 as string) as test_hive_392
+ ,cast(test_hive_401 as string) as test_hive_401
+ ,cast(test_hive_387 as string) as test_hive_387
+ ,cast(from_unixtime(unix_timestamp(test_hive_1396,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1396
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1400
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1400
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1402
+POSTHOOK: Lineage: test_hive_1402.creation_date EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.ds EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.ds_ts SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.source_file_name SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_1382 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1382, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_1383 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1383, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_1384 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1384, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_1385 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1385, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_1386 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1386, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_1387 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1387, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_1388 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1388, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_1389 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1389, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_1390 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1390, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_1391 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1391, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_1392 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1392, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_1393 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1393, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_1394 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1394, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_1395 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1395, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_1396 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1396, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_1403 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1403, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_1404 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1404, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_1405 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1405, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_1406 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1406, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_376 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_376, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_377 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_377, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_378 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_378, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_379 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_379, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_380 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_380, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_381 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_381, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_382 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_382, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_382_txt SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_382_txt, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_384 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_384, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_385 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_385, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_386 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_386, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_387 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_387, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_388 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_388, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_389 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_389, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_390 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_390, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_391 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_391, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_392 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_392, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_393 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_393, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_394 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_394, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_395 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_395, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_396 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_396, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_397 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_397, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_398 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_398, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_399 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_399, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_400 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_400, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_401 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_401, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_402 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_402, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_403 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_403, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_404 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_404, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_405 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_405, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_406 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_406, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_407 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_407, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_408 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_408, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_409 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_409, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_410 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_410, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.test_hive_411 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_411, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1402.ts EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1401
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1401
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1401
+as
+select
+ test_hive_1394 as test_hive_1394
+ ,test_hive_1383 as test_hive_1383
+ ,test_hive_1395 as test_hive_1395
+ ,test_hive_1382 as test_hive_1382
+ ,test_hive_407 as test_hive_407
+ ,test_hive_397 as test_hive_397
+ ,test_hive_379 as test_hive_379
+ ,test_hive_408 as test_hive_408
+ ,test_hive_1384 as test_hive_1384
+ ,test_hive_1385 as test_hive_1385
+ ,test_hive_1386 as test_hive_1386
+ ,test_hive_1387 as test_hive_1387
+ ,test_hive_1388 as test_hive_1388
+ ,test_hive_1389 as test_hive_1389
+ ,test_hive_1390 as test_hive_1390
+ ,test_hive_1391 as test_hive_1391
+ ,test_hive_1392 as test_hive_1392
+ ,test_hive_1393 as test_hive_1393
+ ,test_hive_400 as test_hive_400
+ ,test_hive_386 as test_hive_386
+ ,test_hive_409 as test_hive_409
+ ,test_hive_390 as test_hive_390
+ ,test_hive_381 as test_hive_381
+ ,test_hive_380 as test_hive_380
+ ,test_hive_382 as test_hive_382
+ ,test_hive_382_txt as test_hive_382_txt
+ ,test_hive_410 as test_hive_410
+ ,test_hive_391 as test_hive_391
+ ,test_hive_403 as test_hive_403
+ ,test_hive_388 as test_hive_388
+ ,test_hive_405 as test_hive_405
+ ,test_hive_389 as test_hive_389
+ ,test_hive_393 as test_hive_393
+ ,test_hive_376 as test_hive_376
+ ,test_hive_394 as test_hive_394
+ ,test_hive_377 as test_hive_377
+ ,test_hive_395 as test_hive_395
+ ,test_hive_378 as test_hive_378
+ ,test_hive_406 as test_hive_406
+ ,test_hive_1406 as test_hive_1406
+ ,test_hive_404 as test_hive_404
+ ,test_hive_1405 as test_hive_1405
+ ,test_hive_396 as test_hive_396
+ ,test_hive_1403 as test_hive_1403
+ ,test_hive_402 as test_hive_402
+ ,test_hive_1404 as test_hive_1404
+ ,test_hive_398 as test_hive_398
+ ,test_hive_384 as test_hive_384
+ ,test_hive_399 as test_hive_399
+ ,test_hive_385 as test_hive_385
+ ,test_hive_411 as test_hive_411
+ ,test_hive_392 as test_hive_392
+ ,test_hive_401 as test_hive_401
+ ,test_hive_387 as test_hive_387
+ ,test_hive_1396 as test_hive_1396
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1402 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1400
+PREHOOK: Input: default@test_hive_1402
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1401
+POSTHOOK: query: create view test_hive_1401
+as
+select
+ test_hive_1394 as test_hive_1394
+ ,test_hive_1383 as test_hive_1383
+ ,test_hive_1395 as test_hive_1395
+ ,test_hive_1382 as test_hive_1382
+ ,test_hive_407 as test_hive_407
+ ,test_hive_397 as test_hive_397
+ ,test_hive_379 as test_hive_379
+ ,test_hive_408 as test_hive_408
+ ,test_hive_1384 as test_hive_1384
+ ,test_hive_1385 as test_hive_1385
+ ,test_hive_1386 as test_hive_1386
+ ,test_hive_1387 as test_hive_1387
+ ,test_hive_1388 as test_hive_1388
+ ,test_hive_1389 as test_hive_1389
+ ,test_hive_1390 as test_hive_1390
+ ,test_hive_1391 as test_hive_1391
+ ,test_hive_1392 as test_hive_1392
+ ,test_hive_1393 as test_hive_1393
+ ,test_hive_400 as test_hive_400
+ ,test_hive_386 as test_hive_386
+ ,test_hive_409 as test_hive_409
+ ,test_hive_390 as test_hive_390
+ ,test_hive_381 as test_hive_381
+ ,test_hive_380 as test_hive_380
+ ,test_hive_382 as test_hive_382
+ ,test_hive_382_txt as test_hive_382_txt
+ ,test_hive_410 as test_hive_410
+ ,test_hive_391 as test_hive_391
+ ,test_hive_403 as test_hive_403
+ ,test_hive_388 as test_hive_388
+ ,test_hive_405 as test_hive_405
+ ,test_hive_389 as test_hive_389
+ ,test_hive_393 as test_hive_393
+ ,test_hive_376 as test_hive_376
+ ,test_hive_394 as test_hive_394
+ ,test_hive_377 as test_hive_377
+ ,test_hive_395 as test_hive_395
+ ,test_hive_378 as test_hive_378
+ ,test_hive_406 as test_hive_406
+ ,test_hive_1406 as test_hive_1406
+ ,test_hive_404 as test_hive_404
+ ,test_hive_1405 as test_hive_1405
+ ,test_hive_396 as test_hive_396
+ ,test_hive_1403 as test_hive_1403
+ ,test_hive_402 as test_hive_402
+ ,test_hive_1404 as test_hive_1404
+ ,test_hive_398 as test_hive_398
+ ,test_hive_384 as test_hive_384
+ ,test_hive_399 as test_hive_399
+ ,test_hive_385 as test_hive_385
+ ,test_hive_411 as test_hive_411
+ ,test_hive_392 as test_hive_392
+ ,test_hive_401 as test_hive_401
+ ,test_hive_387 as test_hive_387
+ ,test_hive_1396 as test_hive_1396
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1402 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1400
+POSTHOOK: Input: default@test_hive_1402
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1401
+POSTHOOK: Lineage: test_hive_1401.creation_date EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.ds EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.ds_ts SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.source_file_name SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_1382 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1382, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_1383 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1383, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_1384 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1384, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_1385 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1385, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_1386 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1386, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_1387 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1387, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_1388 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1388, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_1389 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1389, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_1390 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1390, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_1391 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1391, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_1392 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1392, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_1393 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1393, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_1394 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1394, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_1395 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1395, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_1396 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1396, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_1403 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1403, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_1404 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1404, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_1405 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1405, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_1406 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1406, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_376 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_376, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_377 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_377, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_378 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_378, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_379 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_379, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_380 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_380, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_381 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_381, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_382 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_382, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_382_txt SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_382_txt, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_384 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_384, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_385 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_385, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_386 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_386, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_387 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_387, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_388 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_388, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_389 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_389, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_390 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_390, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_391 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_391, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_392 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_392, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_393 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_393, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_394 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_394, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_395 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_395, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_396 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_396, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_397 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_397, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_398 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_398, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_399 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_399, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_400 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_400, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_401 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_401, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_402 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_402, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_403 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_403, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_404 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_404, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_405 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_405, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_406 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_406, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_407 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_407, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_408 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_408, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_409 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_409, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_410 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_410, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.test_hive_411 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_411, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1401.ts EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1398
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1398
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1398
+as
+select t1.*
+from test_hive_1401 t1
+inner join test_hive_1399 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1399
+PREHOOK: Input: default@test_hive_1400
+PREHOOK: Input: default@test_hive_1401
+PREHOOK: Input: default@test_hive_1402
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1398
+POSTHOOK: query: create view test_hive_1398
+as
+select t1.*
+from test_hive_1401 t1
+inner join test_hive_1399 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1399
+POSTHOOK: Input: default@test_hive_1400
+POSTHOOK: Input: default@test_hive_1401
+POSTHOOK: Input: default@test_hive_1402
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1398
+POSTHOOK: Lineage: test_hive_1398.creation_date EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.ds EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.ds_ts SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.source_file_name SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_1382 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1382, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_1383 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1383, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_1384 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1384, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_1385 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1385, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_1386 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1386, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_1387 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1387, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_1388 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1388, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_1389 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1389, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_1390 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1390, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_1391 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1391, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_1392 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1392, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_1393 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1393, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_1394 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1394, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_1395 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1395, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_1396 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1396, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_1403 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1403, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_1404 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1404, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_1405 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1405, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_1406 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_1406, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_376 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_376, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_377 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_377, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_378 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_378, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_379 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_379, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_380 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_380, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_381 EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_381, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_382 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_382, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_382_txt SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_382_txt, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_384 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_384, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_385 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_385, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_386 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_386, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_387 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_387, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_388 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_388, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_389 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_389, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_390 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_390, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_391 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_391, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_392 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_392, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_393 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_393, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_394 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_394, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_395 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_395, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_396 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_396, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_397 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_397, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_398 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_398, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_399 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_399, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_400 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_400, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_401 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_401, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_402 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_402, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_403 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_403, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_404 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_404, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_405 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_405, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_406 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_406, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_407 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_407, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_408 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_408, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_409 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_409, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_410 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_410, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.test_hive_411 SIMPLE [(test_hive_1400)test_hive_1400.FieldSchema(name:test_hive_411, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1398.ts EXPRESSION [(test_hive_1400)test_hive_1400.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1417 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1417 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1417
+(
+ test_hive_1411 string
+ ,test_hive_1407 string
+ ,test_hive_1412 string
+ ,test_hive_412 string
+ ,test_hive_1410 string
+ ,test_hive_1409 string
+ ,test_hive_1408 string
+ ,test_hive_1415 string
+ ,test_hive_1414 string
+ ,test_hive_1413 string
+ ,test_hive_1416 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1417
+POSTHOOK: query: create table test_hive_1417
+(
+ test_hive_1411 string
+ ,test_hive_1407 string
+ ,test_hive_1412 string
+ ,test_hive_412 string
+ ,test_hive_1410 string
+ ,test_hive_1409 string
+ ,test_hive_1408 string
+ ,test_hive_1415 string
+ ,test_hive_1414 string
+ ,test_hive_1413 string
+ ,test_hive_1416 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1417
+PREHOOK: query: create table if not exists test_hive_1420
+(
+ test_hive_1411 string
+ ,test_hive_1407 string
+ ,test_hive_1412 string
+ ,test_hive_412 string
+ ,test_hive_1410 string
+ ,test_hive_1409 string
+ ,test_hive_1408 string
+ ,test_hive_1415 string
+ ,test_hive_1414 string
+ ,test_hive_1413 string
+ ,test_hive_1416 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1420
+POSTHOOK: query: create table if not exists test_hive_1420
+(
+ test_hive_1411 string
+ ,test_hive_1407 string
+ ,test_hive_1412 string
+ ,test_hive_412 string
+ ,test_hive_1410 string
+ ,test_hive_1409 string
+ ,test_hive_1408 string
+ ,test_hive_1415 string
+ ,test_hive_1414 string
+ ,test_hive_1413 string
+ ,test_hive_1416 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1420
+PREHOOK: query: drop table if exists test_hive_1419 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1419 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1419
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1419
+POSTHOOK: query: create table if not exists test_hive_1419
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1419
+PREHOOK: query: drop view if exists test_hive_1422
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1422
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1422
+as
+select
+ cast(test_hive_1411 as int) as test_hive_1411
+ ,cast(test_hive_1407 as int) as test_hive_1407
+ ,cast(test_hive_1412 as int) as test_hive_1412
+ ,cast(test_hive_412 as string) as test_hive_412
+ ,cast(test_hive_1410 as string) as test_hive_1410
+ ,cast(from_unixtime(unix_timestamp(test_hive_1409,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1409
+ ,cast(from_unixtime(unix_timestamp(test_hive_1408,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1408
+ ,cast(test_hive_1415 as string) as test_hive_1415
+ ,cast(test_hive_1414 as string) as test_hive_1414
+ ,cast(test_hive_1413 as string) as test_hive_1413
+ ,cast(from_unixtime(unix_timestamp(test_hive_1416,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1416
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1420
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1420
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1422
+POSTHOOK: query: create view if not exists test_hive_1422
+as
+select
+ cast(test_hive_1411 as int) as test_hive_1411
+ ,cast(test_hive_1407 as int) as test_hive_1407
+ ,cast(test_hive_1412 as int) as test_hive_1412
+ ,cast(test_hive_412 as string) as test_hive_412
+ ,cast(test_hive_1410 as string) as test_hive_1410
+ ,cast(from_unixtime(unix_timestamp(test_hive_1409,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1409
+ ,cast(from_unixtime(unix_timestamp(test_hive_1408,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1408
+ ,cast(test_hive_1415 as string) as test_hive_1415
+ ,cast(test_hive_1414 as string) as test_hive_1414
+ ,cast(test_hive_1413 as string) as test_hive_1413
+ ,cast(from_unixtime(unix_timestamp(test_hive_1416,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1416
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1420
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1420
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1422
+POSTHOOK: Lineage: test_hive_1422.creation_date EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1422.ds EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1422.ds_ts SIMPLE [(test_hive_1420)test_hive_1420.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1422.source_file_name SIMPLE [(test_hive_1420)test_hive_1420.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1422.test_hive_1407 EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1407, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1422.test_hive_1408 EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1408, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1422.test_hive_1409 EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1409, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1422.test_hive_1410 SIMPLE [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1410, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1422.test_hive_1411 EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1411, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1422.test_hive_1412 EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1412, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1422.test_hive_1413 SIMPLE [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1413, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1422.test_hive_1414 SIMPLE [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1414, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1422.test_hive_1415 SIMPLE [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1415, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1422.test_hive_1416 EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1416, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1422.test_hive_412 SIMPLE [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_412, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1422.ts EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1421
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1421
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1421
+as
+select
+ test_hive_1411 as test_hive_1411
+ ,test_hive_1407 as test_hive_1407
+ ,test_hive_1412 as test_hive_1412
+ ,test_hive_412 as test_hive_412
+ ,test_hive_1410 as test_hive_1410
+ ,test_hive_1409 as test_hive_1409
+ ,test_hive_1408 as test_hive_1408
+ ,test_hive_1415 as test_hive_1415
+ ,test_hive_1414 as test_hive_1414
+ ,test_hive_1413 as test_hive_1413
+ ,test_hive_1416 as test_hive_1416
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1422 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1420
+PREHOOK: Input: default@test_hive_1422
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1421
+POSTHOOK: query: create view test_hive_1421
+as
+select
+ test_hive_1411 as test_hive_1411
+ ,test_hive_1407 as test_hive_1407
+ ,test_hive_1412 as test_hive_1412
+ ,test_hive_412 as test_hive_412
+ ,test_hive_1410 as test_hive_1410
+ ,test_hive_1409 as test_hive_1409
+ ,test_hive_1408 as test_hive_1408
+ ,test_hive_1415 as test_hive_1415
+ ,test_hive_1414 as test_hive_1414
+ ,test_hive_1413 as test_hive_1413
+ ,test_hive_1416 as test_hive_1416
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1422 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1420
+POSTHOOK: Input: default@test_hive_1422
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1421
+POSTHOOK: Lineage: test_hive_1421.creation_date EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1421.ds EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1421.ds_ts SIMPLE [(test_hive_1420)test_hive_1420.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1421.source_file_name SIMPLE [(test_hive_1420)test_hive_1420.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1421.test_hive_1407 EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1407, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1421.test_hive_1408 EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1408, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1421.test_hive_1409 EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1409, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1421.test_hive_1410 SIMPLE [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1410, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1421.test_hive_1411 EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1411, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1421.test_hive_1412 EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1412, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1421.test_hive_1413 SIMPLE [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1413, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1421.test_hive_1414 SIMPLE [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1414, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1421.test_hive_1415 SIMPLE [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1415, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1421.test_hive_1416 EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1416, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1421.test_hive_412 SIMPLE [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_412, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1421.ts EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1418
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1418
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1418
+as
+select t1.*
+from test_hive_1421 t1
+inner join test_hive_1419 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1419
+PREHOOK: Input: default@test_hive_1420
+PREHOOK: Input: default@test_hive_1421
+PREHOOK: Input: default@test_hive_1422
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1418
+POSTHOOK: query: create view test_hive_1418
+as
+select t1.*
+from test_hive_1421 t1
+inner join test_hive_1419 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1419
+POSTHOOK: Input: default@test_hive_1420
+POSTHOOK: Input: default@test_hive_1421
+POSTHOOK: Input: default@test_hive_1422
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1418
+POSTHOOK: Lineage: test_hive_1418.creation_date EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1418.ds EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1418.ds_ts SIMPLE [(test_hive_1420)test_hive_1420.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1418.source_file_name SIMPLE [(test_hive_1420)test_hive_1420.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1418.test_hive_1407 EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1407, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1418.test_hive_1408 EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1408, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1418.test_hive_1409 EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1409, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1418.test_hive_1410 SIMPLE [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1410, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1418.test_hive_1411 EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1411, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1418.test_hive_1412 EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1412, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1418.test_hive_1413 SIMPLE [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1413, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1418.test_hive_1414 SIMPLE [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1414, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1418.test_hive_1415 SIMPLE [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1415, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1418.test_hive_1416 EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_1416, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1418.test_hive_412 SIMPLE [(test_hive_1420)test_hive_1420.FieldSchema(name:test_hive_412, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1418.ts EXPRESSION [(test_hive_1420)test_hive_1420.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1114 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1114 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1114
+(
+ test_hive_1108 string
+ ,test_hive_1106 string
+ ,test_hive_1109 string
+ ,test_hive_272 string
+ ,test_hive_1107 string
+ ,test_hive_1112 string
+ ,test_hive_1111 string
+ ,test_hive_1110 string
+ ,test_hive_1113 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1114
+POSTHOOK: query: create table test_hive_1114
+(
+ test_hive_1108 string
+ ,test_hive_1106 string
+ ,test_hive_1109 string
+ ,test_hive_272 string
+ ,test_hive_1107 string
+ ,test_hive_1112 string
+ ,test_hive_1111 string
+ ,test_hive_1110 string
+ ,test_hive_1113 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1114
+PREHOOK: query: create table if not exists test_hive_1117
+(
+ test_hive_1108 string
+ ,test_hive_1106 string
+ ,test_hive_1109 string
+ ,test_hive_272 string
+ ,test_hive_1107 string
+ ,test_hive_1112 string
+ ,test_hive_1111 string
+ ,test_hive_1110 string
+ ,test_hive_1113 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1117
+POSTHOOK: query: create table if not exists test_hive_1117
+(
+ test_hive_1108 string
+ ,test_hive_1106 string
+ ,test_hive_1109 string
+ ,test_hive_272 string
+ ,test_hive_1107 string
+ ,test_hive_1112 string
+ ,test_hive_1111 string
+ ,test_hive_1110 string
+ ,test_hive_1113 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1117
+PREHOOK: query: drop table if exists test_hive_1116 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1116 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1116
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1116
+POSTHOOK: query: create table if not exists test_hive_1116
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1116
+PREHOOK: query: drop view if exists test_hive_1119
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1119
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1119
+as
+select
+ cast(test_hive_1108 as int) as test_hive_1108
+ ,cast(test_hive_1106 as int) as test_hive_1106
+ ,cast(test_hive_1109 as int) as test_hive_1109
+ ,cast(test_hive_272 as string) as test_hive_272
+ ,cast(test_hive_1107 as string) as test_hive_1107
+ ,cast(test_hive_1112 as string) as test_hive_1112
+ ,cast(test_hive_1111 as string) as test_hive_1111
+ ,cast(test_hive_1110 as string) as test_hive_1110
+ ,cast(from_unixtime(unix_timestamp(test_hive_1113,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1113
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1117
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1117
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1119
+POSTHOOK: query: create view if not exists test_hive_1119
+as
+select
+ cast(test_hive_1108 as int) as test_hive_1108
+ ,cast(test_hive_1106 as int) as test_hive_1106
+ ,cast(test_hive_1109 as int) as test_hive_1109
+ ,cast(test_hive_272 as string) as test_hive_272
+ ,cast(test_hive_1107 as string) as test_hive_1107
+ ,cast(test_hive_1112 as string) as test_hive_1112
+ ,cast(test_hive_1111 as string) as test_hive_1111
+ ,cast(test_hive_1110 as string) as test_hive_1110
+ ,cast(from_unixtime(unix_timestamp(test_hive_1113,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1113
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1117
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1117
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1119
+POSTHOOK: Lineage: test_hive_1119.creation_date EXPRESSION [(test_hive_1117)test_hive_1117.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1119.ds EXPRESSION [(test_hive_1117)test_hive_1117.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1119.ds_ts SIMPLE [(test_hive_1117)test_hive_1117.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1119.source_file_name SIMPLE [(test_hive_1117)test_hive_1117.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1119.test_hive_1106 EXPRESSION [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1106, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1119.test_hive_1107 SIMPLE [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1107, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1119.test_hive_1108 EXPRESSION [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1108, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1119.test_hive_1109 EXPRESSION [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1109, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1119.test_hive_1110 SIMPLE [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1110, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1119.test_hive_1111 SIMPLE [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1111, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1119.test_hive_1112 SIMPLE [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1112, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1119.test_hive_1113 EXPRESSION [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1113, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1119.test_hive_272 SIMPLE [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_272, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1119.ts EXPRESSION [(test_hive_1117)test_hive_1117.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1118
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1118
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1118
+as
+select
+ test_hive_1108 as test_hive_1108
+ ,test_hive_1106 as test_hive_1106
+ ,test_hive_1109 as test_hive_1109
+ ,test_hive_272 as test_hive_272
+ ,test_hive_1107 as test_hive_1107
+ ,test_hive_1112 as test_hive_1112
+ ,test_hive_1111 as test_hive_1111
+ ,test_hive_1110 as test_hive_1110
+ ,test_hive_1113 as test_hive_1113
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1119 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1117
+PREHOOK: Input: default@test_hive_1119
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1118
+POSTHOOK: query: create view test_hive_1118
+as
+select
+ test_hive_1108 as test_hive_1108
+ ,test_hive_1106 as test_hive_1106
+ ,test_hive_1109 as test_hive_1109
+ ,test_hive_272 as test_hive_272
+ ,test_hive_1107 as test_hive_1107
+ ,test_hive_1112 as test_hive_1112
+ ,test_hive_1111 as test_hive_1111
+ ,test_hive_1110 as test_hive_1110
+ ,test_hive_1113 as test_hive_1113
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1119 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1117
+POSTHOOK: Input: default@test_hive_1119
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1118
+POSTHOOK: Lineage: test_hive_1118.creation_date EXPRESSION [(test_hive_1117)test_hive_1117.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1118.ds EXPRESSION [(test_hive_1117)test_hive_1117.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1118.ds_ts SIMPLE [(test_hive_1117)test_hive_1117.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1118.source_file_name SIMPLE [(test_hive_1117)test_hive_1117.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1118.test_hive_1106 EXPRESSION [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1106, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1118.test_hive_1107 SIMPLE [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1107, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1118.test_hive_1108 EXPRESSION [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1108, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1118.test_hive_1109 EXPRESSION [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1109, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1118.test_hive_1110 SIMPLE [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1110, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1118.test_hive_1111 SIMPLE [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1111, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1118.test_hive_1112 SIMPLE [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1112, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1118.test_hive_1113 EXPRESSION [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1113, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1118.test_hive_272 SIMPLE [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_272, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1118.ts EXPRESSION [(test_hive_1117)test_hive_1117.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1115
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1115
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1115
+as
+select t1.*
+from test_hive_1118 t1
+inner join test_hive_1116 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1116
+PREHOOK: Input: default@test_hive_1117
+PREHOOK: Input: default@test_hive_1118
+PREHOOK: Input: default@test_hive_1119
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1115
+POSTHOOK: query: create view test_hive_1115
+as
+select t1.*
+from test_hive_1118 t1
+inner join test_hive_1116 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1116
+POSTHOOK: Input: default@test_hive_1117
+POSTHOOK: Input: default@test_hive_1118
+POSTHOOK: Input: default@test_hive_1119
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1115
+POSTHOOK: Lineage: test_hive_1115.creation_date EXPRESSION [(test_hive_1117)test_hive_1117.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1115.ds EXPRESSION [(test_hive_1117)test_hive_1117.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1115.ds_ts SIMPLE [(test_hive_1117)test_hive_1117.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1115.source_file_name SIMPLE [(test_hive_1117)test_hive_1117.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1115.test_hive_1106 EXPRESSION [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1106, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1115.test_hive_1107 SIMPLE [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1107, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1115.test_hive_1108 EXPRESSION [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1108, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1115.test_hive_1109 EXPRESSION [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1109, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1115.test_hive_1110 SIMPLE [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1110, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1115.test_hive_1111 SIMPLE [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1111, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1115.test_hive_1112 SIMPLE [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1112, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1115.test_hive_1113 EXPRESSION [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_1113, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1115.test_hive_272 SIMPLE [(test_hive_1117)test_hive_1117.FieldSchema(name:test_hive_272, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1115.ts EXPRESSION [(test_hive_1117)test_hive_1117.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1128 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1128 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1128
+(
+ test_hive_1122 string
+ ,test_hive_1120 string
+ ,test_hive_1123 string
+ ,test_hive_273 string
+ ,test_hive_1121 string
+ ,test_hive_1126 string
+ ,test_hive_1125 string
+ ,test_hive_1124 string
+ ,test_hive_1127 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1128
+POSTHOOK: query: create table test_hive_1128
+(
+ test_hive_1122 string
+ ,test_hive_1120 string
+ ,test_hive_1123 string
+ ,test_hive_273 string
+ ,test_hive_1121 string
+ ,test_hive_1126 string
+ ,test_hive_1125 string
+ ,test_hive_1124 string
+ ,test_hive_1127 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1128
+PREHOOK: query: create table if not exists test_hive_1131
+(
+ test_hive_1122 string
+ ,test_hive_1120 string
+ ,test_hive_1123 string
+ ,test_hive_273 string
+ ,test_hive_1121 string
+ ,test_hive_1126 string
+ ,test_hive_1125 string
+ ,test_hive_1124 string
+ ,test_hive_1127 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1131
+POSTHOOK: query: create table if not exists test_hive_1131
+(
+ test_hive_1122 string
+ ,test_hive_1120 string
+ ,test_hive_1123 string
+ ,test_hive_273 string
+ ,test_hive_1121 string
+ ,test_hive_1126 string
+ ,test_hive_1125 string
+ ,test_hive_1124 string
+ ,test_hive_1127 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1131
+PREHOOK: query: drop table if exists test_hive_1130 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1130 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1130
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output:
database:default +PREHOOK: Output: default@test_hive_1130 +POSTHOOK: query: create table if not exists test_hive_1130 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1130 +PREHOOK: query: drop view if exists test_hive_1133 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1133 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1133 +as +select + cast(test_hive_1122 as int) as test_hive_1122 + ,cast(test_hive_1120 as int) as test_hive_1120 + ,cast(test_hive_1123 as int) as test_hive_1123 + ,cast(test_hive_273 as string) as test_hive_273 + ,cast(test_hive_1121 as string) as test_hive_1121 + ,cast(test_hive_1126 as string) as test_hive_1126 + ,cast(test_hive_1125 as string) as test_hive_1125 + ,cast(test_hive_1124 as string) as test_hive_1124 + ,cast(from_unixtime(unix_timestamp(test_hive_1127,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1127 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1131 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1131 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1133 +POSTHOOK: query: create view if not exists test_hive_1133 +as +select + cast(test_hive_1122 as int) as test_hive_1122 + ,cast(test_hive_1120 as int) as test_hive_1120 + ,cast(test_hive_1123 as int) as test_hive_1123 + ,cast(test_hive_273 as string) as test_hive_273 + ,cast(test_hive_1121 as string) as test_hive_1121 + ,cast(test_hive_1126 as string) as test_hive_1126 + ,cast(test_hive_1125 as string) as test_hive_1125 + ,cast(test_hive_1124 as string) as test_hive_1124 + ,cast(from_unixtime(unix_timestamp(test_hive_1127,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1127 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1131 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1131 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1133 +POSTHOOK: Lineage: test_hive_1133.creation_date EXPRESSION [(test_hive_1131)test_hive_1131.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1133.ds EXPRESSION [(test_hive_1131)test_hive_1131.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1133.ds_ts SIMPLE [(test_hive_1131)test_hive_1131.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1133.source_file_name SIMPLE [(test_hive_1131)test_hive_1131.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1133.test_hive_1120 EXPRESSION [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1120, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1133.test_hive_1121 SIMPLE [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1121, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1133.test_hive_1122 EXPRESSION [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1122, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1133.test_hive_1123 EXPRESSION [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1123, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1133.test_hive_1124 SIMPLE [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1124, type:string, 
comment:null), ] +POSTHOOK: Lineage: test_hive_1133.test_hive_1125 SIMPLE [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1125, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1133.test_hive_1126 SIMPLE [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1126, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1133.test_hive_1127 EXPRESSION [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1127, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1133.test_hive_273 SIMPLE [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_273, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1133.ts EXPRESSION [(test_hive_1131)test_hive_1131.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1132 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1132 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1132 +as +select + test_hive_1122 as test_hive_1122 + ,test_hive_1120 as test_hive_1120 + ,test_hive_1123 as test_hive_1123 + ,test_hive_273 as test_hive_273 + ,test_hive_1121 as test_hive_1121 + ,test_hive_1126 as test_hive_1126 + ,test_hive_1125 as test_hive_1125 + ,test_hive_1124 as test_hive_1124 + ,test_hive_1127 as test_hive_1127 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1133 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1131 +PREHOOK: Input: default@test_hive_1133 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1132 +POSTHOOK: query: create view test_hive_1132 +as +select + test_hive_1122 as test_hive_1122 + ,test_hive_1120 as test_hive_1120 + ,test_hive_1123 as test_hive_1123 + ,test_hive_273 as test_hive_273 + ,test_hive_1121 as test_hive_1121 + ,test_hive_1126 as test_hive_1126 + ,test_hive_1125 as test_hive_1125 + ,test_hive_1124 as test_hive_1124 + ,test_hive_1127 as test_hive_1127 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1133 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1131 +POSTHOOK: Input: default@test_hive_1133 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1132 +POSTHOOK: Lineage: test_hive_1132.creation_date EXPRESSION [(test_hive_1131)test_hive_1131.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1132.ds EXPRESSION [(test_hive_1131)test_hive_1131.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1132.ds_ts SIMPLE [(test_hive_1131)test_hive_1131.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1132.source_file_name SIMPLE [(test_hive_1131)test_hive_1131.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1132.test_hive_1120 EXPRESSION [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1120, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1132.test_hive_1121 SIMPLE [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1121, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1132.test_hive_1122 EXPRESSION [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1122, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1132.test_hive_1123 EXPRESSION [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1123, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1132.test_hive_1124 SIMPLE [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1124, type:string, comment:null), ] 
+POSTHOOK: Lineage: test_hive_1132.test_hive_1125 SIMPLE [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1125, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1132.test_hive_1126 SIMPLE [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1126, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1132.test_hive_1127 EXPRESSION [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1127, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1132.test_hive_273 SIMPLE [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_273, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1132.ts EXPRESSION [(test_hive_1131)test_hive_1131.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1129
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1129
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1129
+as
+select t1.*
+from test_hive_1132 t1
+inner join test_hive_1130 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1130
+PREHOOK: Input: default@test_hive_1131
+PREHOOK: Input: default@test_hive_1132
+PREHOOK: Input: default@test_hive_1133
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1129
+POSTHOOK: query: create view test_hive_1129
+as
+select t1.*
+from test_hive_1132 t1
+inner join test_hive_1130 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1130
+POSTHOOK: Input: default@test_hive_1131
+POSTHOOK: Input: default@test_hive_1132
+POSTHOOK: Input: default@test_hive_1133
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1129
+POSTHOOK: Lineage: test_hive_1129.creation_date EXPRESSION [(test_hive_1131)test_hive_1131.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1129.ds EXPRESSION [(test_hive_1131)test_hive_1131.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1129.ds_ts SIMPLE [(test_hive_1131)test_hive_1131.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1129.source_file_name SIMPLE [(test_hive_1131)test_hive_1131.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1129.test_hive_1120 EXPRESSION [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1120, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1129.test_hive_1121 SIMPLE [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1121, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1129.test_hive_1122 EXPRESSION [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1122, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1129.test_hive_1123 EXPRESSION [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1123, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1129.test_hive_1124 SIMPLE [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1124, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1129.test_hive_1125 SIMPLE [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1125, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1129.test_hive_1126 SIMPLE [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1126, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1129.test_hive_1127 EXPRESSION [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_1127, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1129.test_hive_273 SIMPLE [(test_hive_1131)test_hive_1131.FieldSchema(name:test_hive_273, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1129.ts EXPRESSION [(test_hive_1131)test_hive_1131.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1142 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1142 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1142
+(
+ test_hive_1136 string
+ ,test_hive_1134 string
+ ,test_hive_1137 string
+ ,test_hive_274 string
+ ,test_hive_1135 string
+ ,test_hive_1140 string
+ ,test_hive_1139 string
+ ,test_hive_1138 string
+ ,test_hive_1141 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1142
+POSTHOOK: query: create table test_hive_1142
+(
+ test_hive_1136 string
+ ,test_hive_1134 string
+ ,test_hive_1137 string
+ ,test_hive_274 string
+ ,test_hive_1135 string
+ ,test_hive_1140 string
+ ,test_hive_1139 string
+ ,test_hive_1138 string
+ ,test_hive_1141 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1142
+PREHOOK: query: create table if not exists test_hive_1145
+(
+ test_hive_1136 string
+ ,test_hive_1134 string
+ ,test_hive_1137 string
+ ,test_hive_274 string
+ ,test_hive_1135 string
+ ,test_hive_1140 string
+ ,test_hive_1139 string
+ ,test_hive_1138 string
+ ,test_hive_1141 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1145
+POSTHOOK: query: create table if not exists test_hive_1145
+(
+ test_hive_1136 string
+ ,test_hive_1134 string
+ ,test_hive_1137 string
+ ,test_hive_274 string
+ ,test_hive_1135 string
+ ,test_hive_1140 string
+ ,test_hive_1139 string
+ ,test_hive_1138 string
+ ,test_hive_1141 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1145
+PREHOOK: query: drop table if exists test_hive_1144 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1144 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1144
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1144
+POSTHOOK: query: create table if not exists test_hive_1144
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1144
+PREHOOK: query: drop view if exists test_hive_1147
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1147
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1147
+as
+select
+ cast(test_hive_1136 as int) as test_hive_1136
+ ,cast(test_hive_1134 as int) as test_hive_1134
+ ,cast(test_hive_1137 as int) as test_hive_1137
+ ,cast(test_hive_274 as string) as test_hive_274
+ ,cast(test_hive_1135 as string) as test_hive_1135
+ ,cast(test_hive_1140 as string) as test_hive_1140
+ ,cast(test_hive_1139 as string) as test_hive_1139
+ ,cast(test_hive_1138 as string) as test_hive_1138
+ ,cast(from_unixtime(unix_timestamp(test_hive_1141,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1141
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1145
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1145
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1147
+POSTHOOK: query: create view if not exists test_hive_1147
+as
+select
+ cast(test_hive_1136 as int) as test_hive_1136
+ ,cast(test_hive_1134 as int) as test_hive_1134
+ ,cast(test_hive_1137 as int) as test_hive_1137
+ ,cast(test_hive_274 as string) as test_hive_274
+ ,cast(test_hive_1135 as string) as test_hive_1135
+ ,cast(test_hive_1140 as string) as test_hive_1140
+ ,cast(test_hive_1139 as string) as test_hive_1139
+ ,cast(test_hive_1138 as string) as test_hive_1138
+ ,cast(from_unixtime(unix_timestamp(test_hive_1141,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1141
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1145
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1145
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1147
+POSTHOOK: Lineage: test_hive_1147.creation_date EXPRESSION [(test_hive_1145)test_hive_1145.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1147.ds EXPRESSION [(test_hive_1145)test_hive_1145.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1147.ds_ts SIMPLE [(test_hive_1145)test_hive_1145.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1147.source_file_name SIMPLE [(test_hive_1145)test_hive_1145.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1147.test_hive_1134 EXPRESSION [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1134, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1147.test_hive_1135 SIMPLE [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1135, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1147.test_hive_1136 EXPRESSION [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1136, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1147.test_hive_1137 EXPRESSION [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1137, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1147.test_hive_1138 SIMPLE [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1138, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1147.test_hive_1139 SIMPLE [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1139, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1147.test_hive_1140 SIMPLE [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1140, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1147.test_hive_1141 EXPRESSION [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1141, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1147.test_hive_274 SIMPLE [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_274, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1147.ts EXPRESSION [(test_hive_1145)test_hive_1145.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1146
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1146
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1146
+as
+select
+ test_hive_1136 as test_hive_1136
+ ,test_hive_1134 as test_hive_1134
+ ,test_hive_1137 as test_hive_1137
+ ,test_hive_274 as test_hive_274
+ ,test_hive_1135 as test_hive_1135
+ ,test_hive_1140 as test_hive_1140
+ ,test_hive_1139 as test_hive_1139
+ ,test_hive_1138 as test_hive_1138
+ ,test_hive_1141 as test_hive_1141
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1147 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1145
+PREHOOK: Input: default@test_hive_1147
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1146
+POSTHOOK: query: create view test_hive_1146
+as
+select
+ test_hive_1136 as test_hive_1136
+ ,test_hive_1134 as test_hive_1134
+ ,test_hive_1137 as test_hive_1137
+ ,test_hive_274 as test_hive_274
+ ,test_hive_1135 as test_hive_1135
+ ,test_hive_1140 as test_hive_1140
+ ,test_hive_1139 as test_hive_1139
+ ,test_hive_1138 as test_hive_1138
+ ,test_hive_1141 as test_hive_1141
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1147 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1145
+POSTHOOK: Input: default@test_hive_1147
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1146
+POSTHOOK: Lineage: test_hive_1146.creation_date EXPRESSION [(test_hive_1145)test_hive_1145.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1146.ds EXPRESSION [(test_hive_1145)test_hive_1145.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1146.ds_ts SIMPLE [(test_hive_1145)test_hive_1145.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1146.source_file_name SIMPLE [(test_hive_1145)test_hive_1145.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1146.test_hive_1134 EXPRESSION [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1134, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1146.test_hive_1135 SIMPLE [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1135, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1146.test_hive_1136 EXPRESSION [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1136, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1146.test_hive_1137 EXPRESSION [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1137, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1146.test_hive_1138 SIMPLE [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1138, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1146.test_hive_1139 SIMPLE [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1139, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1146.test_hive_1140 SIMPLE [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1140, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1146.test_hive_1141 EXPRESSION [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1141, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1146.test_hive_274 SIMPLE [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_274, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1146.ts EXPRESSION [(test_hive_1145)test_hive_1145.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1143
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1143
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1143
+as
+select t1.*
+from test_hive_1146 t1
+inner join test_hive_1144 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1144
+PREHOOK: Input: default@test_hive_1145
+PREHOOK: Input: default@test_hive_1146
+PREHOOK: Input: default@test_hive_1147
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1143
+POSTHOOK: query: create view test_hive_1143
+as
+select t1.*
+from test_hive_1146 t1
+inner join test_hive_1144 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1144
+POSTHOOK: Input: default@test_hive_1145
+POSTHOOK: Input: default@test_hive_1146
+POSTHOOK: Input: default@test_hive_1147
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1143
+POSTHOOK: Lineage: test_hive_1143.creation_date EXPRESSION [(test_hive_1145)test_hive_1145.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1143.ds EXPRESSION [(test_hive_1145)test_hive_1145.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1143.ds_ts SIMPLE [(test_hive_1145)test_hive_1145.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1143.source_file_name SIMPLE [(test_hive_1145)test_hive_1145.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1143.test_hive_1134 EXPRESSION [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1134, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1143.test_hive_1135 SIMPLE [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1135, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1143.test_hive_1136 EXPRESSION [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1136, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1143.test_hive_1137 EXPRESSION [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1137, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1143.test_hive_1138 SIMPLE [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1138, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1143.test_hive_1139 SIMPLE [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1139, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1143.test_hive_1140 SIMPLE [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1140, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1143.test_hive_1141 EXPRESSION [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_1141, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1143.test_hive_274 SIMPLE [(test_hive_1145)test_hive_1145.FieldSchema(name:test_hive_274, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1143.ts EXPRESSION [(test_hive_1145)test_hive_1145.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1158 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1158 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1158
+(
+ test_hive_1152 string
+ ,test_hive_1148 string
+ ,test_hive_1153 string
+ ,test_hive_275 string
+ ,test_hive_1151 string
+ ,test_hive_1150 string
+ ,test_hive_1149 string
+ ,test_hive_1156 string
+ ,test_hive_1155 string
+ ,test_hive_1154 string
+ ,test_hive_1157 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1158
+POSTHOOK: query: create table test_hive_1158
+(
+ test_hive_1152 string
+ ,test_hive_1148 string
+ ,test_hive_1153 string
+ ,test_hive_275 string
+ ,test_hive_1151 string
+ ,test_hive_1150 string
+ ,test_hive_1149 string
+ ,test_hive_1156 string
+ ,test_hive_1155 string
+ ,test_hive_1154 string
+ ,test_hive_1157 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1158
+PREHOOK: query: create table if not exists test_hive_1161
+(
+ test_hive_1152 string
+ ,test_hive_1148 string
+ ,test_hive_1153 string
+ ,test_hive_275 string
+ ,test_hive_1151 string
+ ,test_hive_1150 string
+ ,test_hive_1149 string
+ ,test_hive_1156 string
+ ,test_hive_1155 string
+ ,test_hive_1154 string
+ ,test_hive_1157 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1161
+POSTHOOK: query: create table if not exists test_hive_1161
+(
+ test_hive_1152 string
+ ,test_hive_1148 string
+ ,test_hive_1153 string
+ ,test_hive_275 string
+ ,test_hive_1151 string
+ ,test_hive_1150 string
+ ,test_hive_1149 string
+ ,test_hive_1156 string
+ ,test_hive_1155 string
+ ,test_hive_1154 string
+ ,test_hive_1157 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1161
+PREHOOK: query: drop table if exists test_hive_1160 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1160 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1160
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1160
+POSTHOOK: query: create table if not exists test_hive_1160
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1160
+PREHOOK: query: drop view if exists test_hive_1163
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1163
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1163
+as
+select
+ cast(test_hive_1152 as int) as test_hive_1152
+ ,cast(test_hive_1148 as int) as test_hive_1148
+ ,cast(test_hive_1153 as int) as test_hive_1153
+ ,cast(test_hive_275 as decimal) as test_hive_275
+ ,cast(test_hive_1151 as string) as test_hive_1151
+ ,cast(from_unixtime(unix_timestamp(test_hive_1150,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1150
+ ,cast(from_unixtime(unix_timestamp(test_hive_1149,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1149
+ ,cast(test_hive_1156 as string) as test_hive_1156
+ ,cast(test_hive_1155 as string) as test_hive_1155
+ ,cast(test_hive_1154 as string) as test_hive_1154
+ ,cast(from_unixtime(unix_timestamp(test_hive_1157,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1157
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1161
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1161
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1163
+POSTHOOK: query: create view if not exists test_hive_1163
+as
+select
+ cast(test_hive_1152 as int) as test_hive_1152
+ ,cast(test_hive_1148 as int) as test_hive_1148
+ ,cast(test_hive_1153 as int) as test_hive_1153
+ ,cast(test_hive_275 as decimal) as test_hive_275
+ ,cast(test_hive_1151 as string) as test_hive_1151
+ ,cast(from_unixtime(unix_timestamp(test_hive_1150,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1150
+ ,cast(from_unixtime(unix_timestamp(test_hive_1149,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1149
+ ,cast(test_hive_1156 as string) as test_hive_1156
+ ,cast(test_hive_1155 as string) as test_hive_1155
+ ,cast(test_hive_1154 as string) as test_hive_1154
+ ,cast(from_unixtime(unix_timestamp(test_hive_1157,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1157
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1161
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1161
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1163
+POSTHOOK: Lineage: test_hive_1163.creation_date EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1163.ds EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1163.ds_ts SIMPLE [(test_hive_1161)test_hive_1161.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1163.source_file_name SIMPLE [(test_hive_1161)test_hive_1161.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1163.test_hive_1148 EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1148, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1163.test_hive_1149 EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1149, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1163.test_hive_1150 EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1150, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1163.test_hive_1151 SIMPLE [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1151, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1163.test_hive_1152 EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1152, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1163.test_hive_1153 EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1153, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1163.test_hive_1154 SIMPLE [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1154, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1163.test_hive_1155 SIMPLE [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1155, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1163.test_hive_1156 SIMPLE [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1156, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1163.test_hive_1157 EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1157, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1163.test_hive_275 EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_275, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1163.ts EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1162
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1162
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1162
+as
+select
+ test_hive_1152 as test_hive_1152
+ ,test_hive_1148 as test_hive_1148
+ ,test_hive_1153 as test_hive_1153
+ ,test_hive_275 as test_hive_275
+ ,test_hive_1151 as test_hive_1151
+ ,test_hive_1150 as test_hive_1150
+ ,test_hive_1149 as test_hive_1149
+ ,test_hive_1156 as test_hive_1156
+ ,test_hive_1155 as test_hive_1155
+ ,test_hive_1154 as test_hive_1154
+ ,test_hive_1157 as test_hive_1157
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1163 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1161
+PREHOOK: Input: default@test_hive_1163
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1162
+POSTHOOK: query: create view test_hive_1162
+as
+select
+ test_hive_1152 as test_hive_1152
+ ,test_hive_1148 as test_hive_1148
+ ,test_hive_1153 as test_hive_1153
+ ,test_hive_275 as test_hive_275
+ ,test_hive_1151 as test_hive_1151
+ ,test_hive_1150 as test_hive_1150
+ ,test_hive_1149 as test_hive_1149
+ ,test_hive_1156 as test_hive_1156
+ ,test_hive_1155 as test_hive_1155
+ ,test_hive_1154 as test_hive_1154
+ ,test_hive_1157 as test_hive_1157
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1163 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1161
+POSTHOOK: Input: default@test_hive_1163
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1162
+POSTHOOK: Lineage: test_hive_1162.creation_date EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1162.ds EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1162.ds_ts SIMPLE [(test_hive_1161)test_hive_1161.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1162.source_file_name SIMPLE [(test_hive_1161)test_hive_1161.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1162.test_hive_1148 EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1148, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1162.test_hive_1149 EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1149, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1162.test_hive_1150 EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1150, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1162.test_hive_1151 SIMPLE [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1151, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1162.test_hive_1152 EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1152, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1162.test_hive_1153 EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1153, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1162.test_hive_1154 SIMPLE [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1154, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1162.test_hive_1155 SIMPLE [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1155, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1162.test_hive_1156 SIMPLE [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1156, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1162.test_hive_1157 EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1157, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1162.test_hive_275 EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_275, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1162.ts EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1159
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1159
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1159
+as
+select t1.*
+from test_hive_1162 t1
+inner join test_hive_1160 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1160
+PREHOOK: Input: default@test_hive_1161
+PREHOOK: Input: default@test_hive_1162
+PREHOOK: Input: default@test_hive_1163
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1159
+POSTHOOK: query: create view test_hive_1159
+as
+select t1.*
+from test_hive_1162 t1
+inner join test_hive_1160 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1160
+POSTHOOK: Input: default@test_hive_1161
+POSTHOOK: Input: default@test_hive_1162
+POSTHOOK: Input: default@test_hive_1163
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1159
+POSTHOOK: Lineage: test_hive_1159.creation_date EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1159.ds EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1159.ds_ts SIMPLE [(test_hive_1161)test_hive_1161.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1159.source_file_name SIMPLE [(test_hive_1161)test_hive_1161.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1159.test_hive_1148 EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1148, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1159.test_hive_1149 EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1149, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1159.test_hive_1150 EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1150, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1159.test_hive_1151 SIMPLE [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1151, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1159.test_hive_1152 EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1152, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1159.test_hive_1153 EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1153, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1159.test_hive_1154 SIMPLE [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1154, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1159.test_hive_1155 SIMPLE [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1155, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1159.test_hive_1156 SIMPLE [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1156, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1159.test_hive_1157 EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_1157, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1159.test_hive_275 EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:test_hive_275, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1159.ts EXPRESSION [(test_hive_1161)test_hive_1161.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1172 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1172 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1172
+(
+ test_hive_1166 string
+ ,test_hive_1164 string
+ ,test_hive_1167 string
+ ,test_hive_276 string
+ ,test_hive_1165 string
+ ,test_hive_1170 string
+ ,test_hive_1169 string
+ ,test_hive_1168 string
+ ,test_hive_1171 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1172
+POSTHOOK: query: create table test_hive_1172
+(
+ test_hive_1166 string
+ ,test_hive_1164 string
+ ,test_hive_1167 string
+ ,test_hive_276 string
+ ,test_hive_1165 string
+ ,test_hive_1170 string
+ ,test_hive_1169 string
+ ,test_hive_1168 string
+ ,test_hive_1171 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1172
+PREHOOK: query: create table if not exists test_hive_1175
+(
+ test_hive_1166 string
+ ,test_hive_1164 string
+ ,test_hive_1167 string
+ ,test_hive_276 string
+ ,test_hive_1165 string
+ ,test_hive_1170 string
+ ,test_hive_1169 string
+ ,test_hive_1168 string
+ ,test_hive_1171 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1175
+POSTHOOK: query: create table if not exists test_hive_1175
+(
+ test_hive_1166 string
+ ,test_hive_1164 string
+ ,test_hive_1167 string
+ ,test_hive_276 string
+ ,test_hive_1165 string
+ ,test_hive_1170 string
+ ,test_hive_1169 string
+ ,test_hive_1168 string
+ ,test_hive_1171 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1175
+PREHOOK: query: drop table if exists test_hive_1174 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1174 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1174
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1174
+POSTHOOK: query: create table if not exists test_hive_1174
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1174
+PREHOOK: query: drop view if exists test_hive_1177
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1177
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1177
+as
+select
cast(test_hive_1166 as int) as test_hive_1166 + ,cast(test_hive_1164 as int) as test_hive_1164 + ,cast(test_hive_1167 as int) as test_hive_1167 + ,cast(test_hive_276 as string) as test_hive_276 + ,cast(test_hive_1165 as string) as test_hive_1165 + ,cast(test_hive_1170 as string) as test_hive_1170 + ,cast(test_hive_1169 as string) as test_hive_1169 + ,cast(test_hive_1168 as string) as test_hive_1168 + ,cast(from_unixtime(unix_timestamp(test_hive_1171,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1171 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1175 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1175 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1177 +POSTHOOK: query: create view if not exists test_hive_1177 +as +select + cast(test_hive_1166 as int) as test_hive_1166 + ,cast(test_hive_1164 as int) as test_hive_1164 + ,cast(test_hive_1167 as int) as test_hive_1167 + ,cast(test_hive_276 as string) as test_hive_276 + ,cast(test_hive_1165 as string) as test_hive_1165 + ,cast(test_hive_1170 as string) as test_hive_1170 + ,cast(test_hive_1169 as string) as test_hive_1169 + ,cast(test_hive_1168 as string) as test_hive_1168 + ,cast(from_unixtime(unix_timestamp(test_hive_1171,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1171 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1175 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1175 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1177 +POSTHOOK: Lineage: test_hive_1177.creation_date EXPRESSION [(test_hive_1175)test_hive_1175.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1177.ds EXPRESSION [(test_hive_1175)test_hive_1175.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1177.ds_ts SIMPLE [(test_hive_1175)test_hive_1175.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1177.source_file_name SIMPLE [(test_hive_1175)test_hive_1175.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1177.test_hive_1164 EXPRESSION [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1164, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1177.test_hive_1165 SIMPLE [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1165, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1177.test_hive_1166 EXPRESSION [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1166, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1177.test_hive_1167 EXPRESSION [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1167, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1177.test_hive_1168 SIMPLE [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1168, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1177.test_hive_1169 SIMPLE [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1169, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1177.test_hive_1170 SIMPLE [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1170, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1177.test_hive_1171 EXPRESSION [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1171, type:string, comment:null), ] +POSTHOOK: 
Lineage: test_hive_1177.test_hive_276 SIMPLE [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_276, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1177.ts EXPRESSION [(test_hive_1175)test_hive_1175.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1176 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1176 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1176 +as +select + test_hive_1166 as test_hive_1166 + ,test_hive_1164 as test_hive_1164 + ,test_hive_1167 as test_hive_1167 + ,test_hive_276 as test_hive_276 + ,test_hive_1165 as test_hive_1165 + ,test_hive_1170 as test_hive_1170 + ,test_hive_1169 as test_hive_1169 + ,test_hive_1168 as test_hive_1168 + ,test_hive_1171 as test_hive_1171 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1177 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1175 +PREHOOK: Input: default@test_hive_1177 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1176 +POSTHOOK: query: create view test_hive_1176 +as +select + test_hive_1166 as test_hive_1166 + ,test_hive_1164 as test_hive_1164 + ,test_hive_1167 as test_hive_1167 + ,test_hive_276 as test_hive_276 + ,test_hive_1165 as test_hive_1165 + ,test_hive_1170 as test_hive_1170 + ,test_hive_1169 as test_hive_1169 + ,test_hive_1168 as test_hive_1168 + ,test_hive_1171 as test_hive_1171 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1177 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1175 +POSTHOOK: Input: default@test_hive_1177 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1176 +POSTHOOK: Lineage: test_hive_1176.creation_date EXPRESSION [(test_hive_1175)test_hive_1175.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1176.ds EXPRESSION [(test_hive_1175)test_hive_1175.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1176.ds_ts SIMPLE [(test_hive_1175)test_hive_1175.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1176.source_file_name SIMPLE [(test_hive_1175)test_hive_1175.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1176.test_hive_1164 EXPRESSION [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1164, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1176.test_hive_1165 SIMPLE [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1165, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1176.test_hive_1166 EXPRESSION [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1166, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1176.test_hive_1167 EXPRESSION [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1167, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1176.test_hive_1168 SIMPLE [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1168, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1176.test_hive_1169 SIMPLE [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1169, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1176.test_hive_1170 SIMPLE [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1170, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1176.test_hive_1171 EXPRESSION [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1171, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1176.test_hive_276 SIMPLE [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_276, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1176.ts EXPRESSION [(test_hive_1175)test_hive_1175.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1173 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1173 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1173 +as +select t1.* +from test_hive_1176 t1 +inner join test_hive_1174 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1174 +PREHOOK: Input: default@test_hive_1175 +PREHOOK: Input: default@test_hive_1176 +PREHOOK: Input: default@test_hive_1177 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1173 +POSTHOOK: query: create view test_hive_1173 +as +select t1.* +from test_hive_1176 t1 +inner join test_hive_1174 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1174 +POSTHOOK: Input: default@test_hive_1175 +POSTHOOK: Input: default@test_hive_1176 +POSTHOOK: Input: default@test_hive_1177 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1173 +POSTHOOK: Lineage: test_hive_1173.creation_date EXPRESSION [(test_hive_1175)test_hive_1175.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1173.ds EXPRESSION [(test_hive_1175)test_hive_1175.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1173.ds_ts SIMPLE [(test_hive_1175)test_hive_1175.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1173.source_file_name SIMPLE [(test_hive_1175)test_hive_1175.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1173.test_hive_1164 EXPRESSION [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1164, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1173.test_hive_1165 SIMPLE [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1165, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1173.test_hive_1166 EXPRESSION [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1166, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1173.test_hive_1167 EXPRESSION [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1167, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1173.test_hive_1168 SIMPLE [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1168, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1173.test_hive_1169 SIMPLE [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1169, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1173.test_hive_1170 SIMPLE [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1170, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1173.test_hive_1171 EXPRESSION [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_1171, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1173.test_hive_276 SIMPLE [(test_hive_1175)test_hive_1175.FieldSchema(name:test_hive_276, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1173.ts EXPRESSION [(test_hive_1175)test_hive_1175.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1186 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1186 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1186 +( + 
test_hive_1180 string + ,test_hive_1178 string + ,test_hive_1181 string + ,test_hive_277 string + ,test_hive_1179 string + ,test_hive_1184 string + ,test_hive_1183 string + ,test_hive_1182 string + ,test_hive_1185 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1186 +POSTHOOK: query: create table test_hive_1186 +( + test_hive_1180 string + ,test_hive_1178 string + ,test_hive_1181 string + ,test_hive_277 string + ,test_hive_1179 string + ,test_hive_1184 string + ,test_hive_1183 string + ,test_hive_1182 string + ,test_hive_1185 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1186 +PREHOOK: query: create table if not exists test_hive_1189 +( + test_hive_1180 string + ,test_hive_1178 string + ,test_hive_1181 string + ,test_hive_277 string + ,test_hive_1179 string + ,test_hive_1184 string + ,test_hive_1183 string + ,test_hive_1182 string + ,test_hive_1185 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1189 +POSTHOOK: query: create table if not exists test_hive_1189 +( + test_hive_1180 string + ,test_hive_1178 string + ,test_hive_1181 string + ,test_hive_277 string + ,test_hive_1179 string + ,test_hive_1184 string + ,test_hive_1183 string + ,test_hive_1182 string + ,test_hive_1185 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1189 +PREHOOK: query: drop table if exists test_hive_1188 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1188 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1188 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1188 +POSTHOOK: query: create table if not exists test_hive_1188 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1188 +PREHOOK: query: drop view if exists test_hive_1191 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1191 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1191 +as +select + cast(test_hive_1180 as int) as test_hive_1180 + ,cast(test_hive_1178 as int) as test_hive_1178 + ,cast(test_hive_1181 as int) as test_hive_1181 + ,cast(test_hive_277 as string) as test_hive_277 + ,cast(test_hive_1179 as string) as test_hive_1179 + ,cast(test_hive_1184 as string) as test_hive_1184 + ,cast(test_hive_1183 as string) as test_hive_1183 + ,cast(test_hive_1182 as string) as test_hive_1182 + ,cast(from_unixtime(unix_timestamp(test_hive_1185,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1185 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1189 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1189 +PREHOOK: 
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1191
+POSTHOOK: query: create view if not exists test_hive_1191
+as
+select
+ cast(test_hive_1180 as int) as test_hive_1180
+ ,cast(test_hive_1178 as int) as test_hive_1178
+ ,cast(test_hive_1181 as int) as test_hive_1181
+ ,cast(test_hive_277 as string) as test_hive_277
+ ,cast(test_hive_1179 as string) as test_hive_1179
+ ,cast(test_hive_1184 as string) as test_hive_1184
+ ,cast(test_hive_1183 as string) as test_hive_1183
+ ,cast(test_hive_1182 as string) as test_hive_1182
+ ,cast(from_unixtime(unix_timestamp(test_hive_1185,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1185
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1189
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1189
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1191
+POSTHOOK: Lineage: test_hive_1191.creation_date EXPRESSION [(test_hive_1189)test_hive_1189.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1191.ds EXPRESSION [(test_hive_1189)test_hive_1189.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1191.ds_ts SIMPLE [(test_hive_1189)test_hive_1189.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1191.source_file_name SIMPLE [(test_hive_1189)test_hive_1189.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1191.test_hive_1178 EXPRESSION [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1178, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1191.test_hive_1179 SIMPLE [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1179, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1191.test_hive_1180 EXPRESSION [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1180, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1191.test_hive_1181 EXPRESSION [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1181, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1191.test_hive_1182 SIMPLE [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1182, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1191.test_hive_1183 SIMPLE [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1183, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1191.test_hive_1184 SIMPLE [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1184, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1191.test_hive_1185 EXPRESSION [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1185, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1191.test_hive_277 SIMPLE [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_277, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1191.ts EXPRESSION [(test_hive_1189)test_hive_1189.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1190
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1190
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1190
+as
+select
+ test_hive_1180 as test_hive_1180
+ ,test_hive_1178 as test_hive_1178
+ ,test_hive_1181 as test_hive_1181
+ ,test_hive_277 as test_hive_277
+ ,test_hive_1179 as test_hive_1179
+ ,test_hive_1184 as test_hive_1184
+ ,test_hive_1183 as test_hive_1183
+ ,test_hive_1182 as test_hive_1182
+ ,test_hive_1185 as test_hive_1185
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1191 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1189
+PREHOOK: Input: default@test_hive_1191
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1190
+POSTHOOK: query: create view test_hive_1190
+as
+select
+ test_hive_1180 as test_hive_1180
+ ,test_hive_1178 as test_hive_1178
+ ,test_hive_1181 as test_hive_1181
+ ,test_hive_277 as test_hive_277
+ ,test_hive_1179 as test_hive_1179
+ ,test_hive_1184 as test_hive_1184
+ ,test_hive_1183 as test_hive_1183
+ ,test_hive_1182 as test_hive_1182
+ ,test_hive_1185 as test_hive_1185
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1191 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1189
+POSTHOOK: Input: default@test_hive_1191
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1190
+POSTHOOK: Lineage: test_hive_1190.creation_date EXPRESSION [(test_hive_1189)test_hive_1189.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1190.ds EXPRESSION [(test_hive_1189)test_hive_1189.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1190.ds_ts SIMPLE [(test_hive_1189)test_hive_1189.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1190.source_file_name SIMPLE [(test_hive_1189)test_hive_1189.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1190.test_hive_1178 EXPRESSION [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1178, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1190.test_hive_1179 SIMPLE [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1179, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1190.test_hive_1180 EXPRESSION [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1180, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1190.test_hive_1181 EXPRESSION [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1181, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1190.test_hive_1182 SIMPLE [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1182, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1190.test_hive_1183 SIMPLE [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1183, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1190.test_hive_1184 SIMPLE [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1184, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1190.test_hive_1185 EXPRESSION [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1185, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1190.test_hive_277 SIMPLE [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_277, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1190.ts EXPRESSION [(test_hive_1189)test_hive_1189.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1187
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1187
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1187
+as
+select t1.*
+from test_hive_1190 t1
+inner join test_hive_1188 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1188
+PREHOOK: Input: default@test_hive_1189
+PREHOOK: Input: default@test_hive_1190
+PREHOOK: Input: default@test_hive_1191
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1187
+POSTHOOK: query: create view test_hive_1187
+as
+select t1.*
+from test_hive_1190 t1
+inner join test_hive_1188 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1188
+POSTHOOK: Input: default@test_hive_1189
+POSTHOOK: Input: default@test_hive_1190
+POSTHOOK: Input: default@test_hive_1191
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1187
+POSTHOOK: Lineage: test_hive_1187.creation_date EXPRESSION [(test_hive_1189)test_hive_1189.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1187.ds EXPRESSION [(test_hive_1189)test_hive_1189.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1187.ds_ts SIMPLE [(test_hive_1189)test_hive_1189.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1187.source_file_name SIMPLE [(test_hive_1189)test_hive_1189.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1187.test_hive_1178 EXPRESSION [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1178, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1187.test_hive_1179 SIMPLE [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1179, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1187.test_hive_1180 EXPRESSION [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1180, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1187.test_hive_1181 EXPRESSION [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1181, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1187.test_hive_1182 SIMPLE [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1182, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1187.test_hive_1183 SIMPLE [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1183, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1187.test_hive_1184 SIMPLE [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1184, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1187.test_hive_1185 EXPRESSION [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_1185, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1187.test_hive_277 SIMPLE [(test_hive_1189)test_hive_1189.FieldSchema(name:test_hive_277, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1187.ts EXPRESSION [(test_hive_1189)test_hive_1189.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1202 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1202 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1202
+(
+ test_hive_1196 string
+ ,test_hive_1192 string
+ ,test_hive_1197 string
+ ,test_hive_278 string
+ ,test_hive_1195 string
+ ,test_hive_1194 string
+ ,test_hive_1193 string
+ ,test_hive_1200 string
+ ,test_hive_1199 string
+ ,test_hive_1198 string
+ ,test_hive_1201 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1202
+POSTHOOK: query: create table test_hive_1202
+(
+ test_hive_1196 string
+ ,test_hive_1192 string
+ ,test_hive_1197 string
+ ,test_hive_278 string
+ ,test_hive_1195 string
+ ,test_hive_1194 string
+ ,test_hive_1193 string
+ ,test_hive_1200 string
+ ,test_hive_1199 string
+ ,test_hive_1198 string
+ ,test_hive_1201 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1202
+PREHOOK: query: create table if not exists test_hive_1205
+(
+ test_hive_1196 string
+ ,test_hive_1192 string
+ ,test_hive_1197 string
+ ,test_hive_278 string
+ ,test_hive_1195 string
+ ,test_hive_1194 string
+ ,test_hive_1193 string
+ ,test_hive_1200 string
+ ,test_hive_1199 string
+ ,test_hive_1198 string
+ ,test_hive_1201 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1205
+POSTHOOK: query: create table if not exists test_hive_1205
+(
+ test_hive_1196 string
+ ,test_hive_1192 string
+ ,test_hive_1197 string
+ ,test_hive_278 string
+ ,test_hive_1195 string
+ ,test_hive_1194 string
+ ,test_hive_1193 string
+ ,test_hive_1200 string
+ ,test_hive_1199 string
+ ,test_hive_1198 string
+ ,test_hive_1201 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1205
+PREHOOK: query: drop table if exists test_hive_1204 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1204 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1204
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1204
+POSTHOOK: query: create table if not exists test_hive_1204
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1204
+PREHOOK: query: drop view if exists test_hive_1207
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1207
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1207
+as
+select
+ cast(test_hive_1196 as int) as test_hive_1196
+ ,cast(test_hive_1192 as int) as test_hive_1192
+ ,cast(test_hive_1197 as int) as test_hive_1197
+ ,cast(test_hive_278 as decimal) as test_hive_278
+ ,cast(test_hive_1195 as string) as test_hive_1195
+ ,cast(from_unixtime(unix_timestamp(test_hive_1194,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1194
+ ,cast(from_unixtime(unix_timestamp(test_hive_1193,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1193
+ ,cast(test_hive_1200 as string) as test_hive_1200
+ ,cast(test_hive_1199 as string) as test_hive_1199
+ ,cast(test_hive_1198 as string) as test_hive_1198
+ ,cast(from_unixtime(unix_timestamp(test_hive_1201,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1201
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1205
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1205
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1207
+POSTHOOK: query: create view if not exists test_hive_1207
+as
+select
+ cast(test_hive_1196 as int) as test_hive_1196
+ ,cast(test_hive_1192 as int) as test_hive_1192
+ ,cast(test_hive_1197 as int) as test_hive_1197
+ ,cast(test_hive_278 as decimal) as test_hive_278
+ ,cast(test_hive_1195 as string) as test_hive_1195
+ ,cast(from_unixtime(unix_timestamp(test_hive_1194,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1194
+ ,cast(from_unixtime(unix_timestamp(test_hive_1193,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1193
+ ,cast(test_hive_1200 as string) as test_hive_1200
+ ,cast(test_hive_1199 as string) as test_hive_1199
+ ,cast(test_hive_1198 as string) as test_hive_1198
+ ,cast(from_unixtime(unix_timestamp(test_hive_1201,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1201
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1205
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1205
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1207
+POSTHOOK: Lineage: test_hive_1207.creation_date EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1207.ds EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1207.ds_ts SIMPLE [(test_hive_1205)test_hive_1205.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1207.source_file_name SIMPLE [(test_hive_1205)test_hive_1205.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1207.test_hive_1192 EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1192, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1207.test_hive_1193 EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1193, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1207.test_hive_1194 EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1194, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1207.test_hive_1195 SIMPLE [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1195, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1207.test_hive_1196 EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1196, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1207.test_hive_1197 EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1197, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1207.test_hive_1198 SIMPLE [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1198, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1207.test_hive_1199 SIMPLE [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1199, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1207.test_hive_1200 SIMPLE [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1200, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1207.test_hive_1201 EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1201, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1207.test_hive_278 EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_278, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1207.ts EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1206
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1206
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1206
+as
+select
+ test_hive_1196 as test_hive_1196
+ ,test_hive_1192 as test_hive_1192
+ ,test_hive_1197 as test_hive_1197
+ ,test_hive_278 as test_hive_278
+ ,test_hive_1195 as test_hive_1195
+ ,test_hive_1194 as test_hive_1194
+ ,test_hive_1193 as test_hive_1193
+ ,test_hive_1200 as test_hive_1200
+ ,test_hive_1199 as test_hive_1199
+ ,test_hive_1198 as test_hive_1198
+ ,test_hive_1201 as test_hive_1201
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1207 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1205
+PREHOOK: Input: default@test_hive_1207
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1206
+POSTHOOK: query: create view test_hive_1206
+as
+select
+ test_hive_1196 as test_hive_1196
+ ,test_hive_1192 as test_hive_1192
+ ,test_hive_1197 as test_hive_1197
+ ,test_hive_278 as test_hive_278
+ ,test_hive_1195 as test_hive_1195
+ ,test_hive_1194 as test_hive_1194
+ ,test_hive_1193 as test_hive_1193
+ ,test_hive_1200 as test_hive_1200
+ ,test_hive_1199 as test_hive_1199
+ ,test_hive_1198 as test_hive_1198
+ ,test_hive_1201 as test_hive_1201
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1207 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1205
+POSTHOOK: Input: default@test_hive_1207
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1206
+POSTHOOK: Lineage: test_hive_1206.creation_date EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1206.ds EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1206.ds_ts SIMPLE [(test_hive_1205)test_hive_1205.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1206.source_file_name SIMPLE [(test_hive_1205)test_hive_1205.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1206.test_hive_1192 EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1192, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1206.test_hive_1193 EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1193, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1206.test_hive_1194 EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1194, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1206.test_hive_1195 SIMPLE [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1195, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1206.test_hive_1196 EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1196, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1206.test_hive_1197 EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1197, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1206.test_hive_1198 SIMPLE [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1198, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1206.test_hive_1199 SIMPLE [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1199, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1206.test_hive_1200 SIMPLE [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1200, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1206.test_hive_1201 EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1201, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1206.test_hive_278 EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_278, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1206.ts EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1203
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1203
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1203
+as
+select t1.*
+from test_hive_1206 t1
+inner join test_hive_1204 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1204
+PREHOOK: Input: default@test_hive_1205
+PREHOOK: Input: default@test_hive_1206
+PREHOOK: Input: default@test_hive_1207
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1203
+POSTHOOK: query: create view test_hive_1203
+as
+select t1.*
+from test_hive_1206 t1
+inner join test_hive_1204 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1204
+POSTHOOK: Input: default@test_hive_1205
+POSTHOOK: Input: default@test_hive_1206
+POSTHOOK: Input: default@test_hive_1207
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1203
+POSTHOOK: Lineage: test_hive_1203.creation_date EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1203.ds EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1203.ds_ts SIMPLE [(test_hive_1205)test_hive_1205.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1203.source_file_name SIMPLE [(test_hive_1205)test_hive_1205.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1203.test_hive_1192 EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1192, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1203.test_hive_1193 EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1193, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1203.test_hive_1194 EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1194, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1203.test_hive_1195 SIMPLE [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1195, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1203.test_hive_1196 EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1196, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1203.test_hive_1197 EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1197, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1203.test_hive_1198 SIMPLE [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1198, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1203.test_hive_1199 SIMPLE [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1199, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1203.test_hive_1200 SIMPLE [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1200, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1203.test_hive_1201 EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_1201, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1203.test_hive_278 EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:test_hive_278, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1203.ts EXPRESSION [(test_hive_1205)test_hive_1205.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1312 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1312 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1312
+(
+ test_hive_1307 string
+ ,test_hive_1305 string
+ ,test_hive_1308 string
+ ,test_hive_334 string
+ ,test_hive_1306 string
+ ,test_hive_1310 string
+ ,test_hive_1309 string
+ ,test_hive_1311 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1312
+POSTHOOK: query: create table test_hive_1312
+(
+ test_hive_1307 string
+ ,test_hive_1305 string
+ ,test_hive_1308 string
+ ,test_hive_334 string
+ ,test_hive_1306 string
+ ,test_hive_1310 string
+ ,test_hive_1309 string
+ ,test_hive_1311 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1312
+PREHOOK: query: create table if not exists test_hive_1315
+(
+ test_hive_1307 string
+ ,test_hive_1305 string
+ ,test_hive_1308 string
+ ,test_hive_334 string
+ ,test_hive_1306 string
+ ,test_hive_1310 string
+ ,test_hive_1309 string
+ ,test_hive_1311 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1315
+POSTHOOK: query: create table if not exists test_hive_1315
+(
+ test_hive_1307 string
+ ,test_hive_1305 string
+ ,test_hive_1308 string
+ ,test_hive_334 string
+ ,test_hive_1306 string
+ ,test_hive_1310 string
+ ,test_hive_1309 string
+ ,test_hive_1311 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1315
+PREHOOK: query: drop table if exists test_hive_1314 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1314 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1314
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1314
+POSTHOOK: query: create table if not exists test_hive_1314
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1314
+PREHOOK: query: drop view if exists test_hive_1317
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1317
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1317
+as
+select
+ cast(test_hive_1307 as int) as test_hive_1307
+ ,cast(test_hive_1305 as int) as test_hive_1305
+ ,cast(test_hive_1308 as int) as test_hive_1308
+ ,cast(test_hive_334 as string) as test_hive_334
+ ,cast(test_hive_1306 as string) as test_hive_1306
+ ,cast(test_hive_1310 as string) as test_hive_1310
+ ,cast(test_hive_1309 as string) as test_hive_1309
+ ,cast(from_unixtime(unix_timestamp(test_hive_1311,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1311
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1315
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1315
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1317
+POSTHOOK: query: create view if not exists test_hive_1317
+as
+select
+ cast(test_hive_1307 as int) as test_hive_1307
+ ,cast(test_hive_1305 as int) as test_hive_1305
+ ,cast(test_hive_1308 as int) as test_hive_1308
+ ,cast(test_hive_334 as string) as test_hive_334
+ ,cast(test_hive_1306 as string) as test_hive_1306
+ ,cast(test_hive_1310 as string) as test_hive_1310
+ ,cast(test_hive_1309 as string) as test_hive_1309
+ ,cast(from_unixtime(unix_timestamp(test_hive_1311,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1311
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1315
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1315
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1317
+POSTHOOK: Lineage: test_hive_1317.creation_date EXPRESSION [(test_hive_1315)test_hive_1315.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1317.ds EXPRESSION [(test_hive_1315)test_hive_1315.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1317.ds_ts SIMPLE [(test_hive_1315)test_hive_1315.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1317.source_file_name SIMPLE [(test_hive_1315)test_hive_1315.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1317.test_hive_1305 EXPRESSION [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_1305, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1317.test_hive_1306 SIMPLE [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_1306, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1317.test_hive_1307 EXPRESSION [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_1307, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1317.test_hive_1308 EXPRESSION [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_1308, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1317.test_hive_1309 SIMPLE [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_1309, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1317.test_hive_1310 SIMPLE [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_1310, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1317.test_hive_1311 EXPRESSION [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_1311, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1317.test_hive_334 SIMPLE [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_334, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1317.ts EXPRESSION [(test_hive_1315)test_hive_1315.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1316
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1316
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1316
+as
+select
+ test_hive_1307 as test_hive_1307
+ ,test_hive_1305 as test_hive_1305
+ ,test_hive_1308 as test_hive_1308
+ ,test_hive_334 as test_hive_334
+ ,test_hive_1306 as test_hive_1306
+ ,test_hive_1310 as test_hive_1310
+ ,test_hive_1309 as test_hive_1309
+ ,test_hive_1311 as test_hive_1311
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1317 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1315
+PREHOOK: Input: default@test_hive_1317
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1316
+POSTHOOK: query: create view test_hive_1316
+as
+select
+ test_hive_1307 as test_hive_1307
+ ,test_hive_1305 as test_hive_1305
+ ,test_hive_1308 as test_hive_1308
+ ,test_hive_334 as test_hive_334
+ ,test_hive_1306 as test_hive_1306
+ ,test_hive_1310 as test_hive_1310
+ ,test_hive_1309 as test_hive_1309
+ ,test_hive_1311 as test_hive_1311
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1317 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1315
+POSTHOOK: Input: default@test_hive_1317
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1316
+POSTHOOK: Lineage: test_hive_1316.creation_date EXPRESSION [(test_hive_1315)test_hive_1315.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1316.ds EXPRESSION [(test_hive_1315)test_hive_1315.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1316.ds_ts SIMPLE [(test_hive_1315)test_hive_1315.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1316.source_file_name SIMPLE [(test_hive_1315)test_hive_1315.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1316.test_hive_1305 EXPRESSION [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_1305, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1316.test_hive_1306 SIMPLE [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_1306, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1316.test_hive_1307 EXPRESSION [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_1307, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1316.test_hive_1308 EXPRESSION [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_1308, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1316.test_hive_1309 SIMPLE [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_1309, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1316.test_hive_1310 SIMPLE [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_1310, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1316.test_hive_1311 EXPRESSION [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_1311, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1316.test_hive_334 SIMPLE [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_334, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1316.ts EXPRESSION [(test_hive_1315)test_hive_1315.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1313
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1313
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1313
+as
+select t1.*
+from test_hive_1316 t1
+inner join test_hive_1314 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1314
+PREHOOK: Input: default@test_hive_1315
+PREHOOK: Input: default@test_hive_1316
+PREHOOK: Input: default@test_hive_1317
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1313
+POSTHOOK: query: create view test_hive_1313
+as
+select t1.*
+from test_hive_1316 t1
+inner join test_hive_1314 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1314
+POSTHOOK: Input: default@test_hive_1315
+POSTHOOK: Input: default@test_hive_1316
+POSTHOOK: Input: default@test_hive_1317
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1313
+POSTHOOK: Lineage: test_hive_1313.creation_date EXPRESSION [(test_hive_1315)test_hive_1315.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1313.ds EXPRESSION [(test_hive_1315)test_hive_1315.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1313.ds_ts SIMPLE [(test_hive_1315)test_hive_1315.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1313.source_file_name SIMPLE [(test_hive_1315)test_hive_1315.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1313.test_hive_1305 EXPRESSION [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_1305, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1313.test_hive_1306 SIMPLE [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_1306, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1313.test_hive_1307 EXPRESSION [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_1307, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1313.test_hive_1308 EXPRESSION [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_1308, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1313.test_hive_1309 SIMPLE [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_1309, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1313.test_hive_1310 SIMPLE [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_1310, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1313.test_hive_1311 EXPRESSION [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_1311, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1313.test_hive_334 SIMPLE [(test_hive_1315)test_hive_1315.FieldSchema(name:test_hive_334, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1313.ts EXPRESSION [(test_hive_1315)test_hive_1315.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1344 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1344 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1344
+(
+ test_hive_1338 string
+ ,test_hive_1334 string
+ ,test_hive_1339 string
+ ,test_hive_336 string
+ ,test_hive_1337 string
+ ,test_hive_1336 string
+ ,test_hive_1335 string
+ ,test_hive_1342 string
+ ,test_hive_1341 string
+ ,test_hive_1340 string
+ ,test_hive_1343 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1344
+POSTHOOK: query: create table test_hive_1344
+(
+ test_hive_1338 string
+ ,test_hive_1334 string
+ ,test_hive_1339 string
+ ,test_hive_336 string
+ ,test_hive_1337 string
+ ,test_hive_1336 string
+ ,test_hive_1335 string
+ ,test_hive_1342 string
+ ,test_hive_1341 string
+ ,test_hive_1340 string
+ ,test_hive_1343 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1344
+PREHOOK: query: create table if not exists test_hive_1347
+(
+ test_hive_1338 string
+ ,test_hive_1334 string
+ ,test_hive_1339 string
+ ,test_hive_336 string
+ ,test_hive_1337 string
+ ,test_hive_1336 string
+ ,test_hive_1335 string
+ ,test_hive_1342 string
+ ,test_hive_1341 string
+ ,test_hive_1340 string
+ ,test_hive_1343 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1347
+POSTHOOK: query: create table if not exists test_hive_1347
+(
+ test_hive_1338 string
+ ,test_hive_1334 string
+ ,test_hive_1339 string
+ ,test_hive_336 string
+ ,test_hive_1337 string
+ ,test_hive_1336 string
+ ,test_hive_1335 string
+ ,test_hive_1342 string
+ ,test_hive_1341 string
+ ,test_hive_1340 string
+ ,test_hive_1343 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1347
+PREHOOK: query: drop table if exists test_hive_1346 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1346 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1346
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1346
+POSTHOOK: query: create table if not exists test_hive_1346
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1346
+PREHOOK: query: drop view if exists test_hive_1349
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1349
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1349
+as
+select
+ cast(test_hive_1338 as int) as test_hive_1338
+ ,cast(test_hive_1334 as int) as test_hive_1334
+ ,cast(test_hive_1339 as int) as test_hive_1339
+ ,cast(test_hive_336 as string) as test_hive_336
+ ,cast(test_hive_1337 as string) as test_hive_1337
+ ,cast(from_unixtime(unix_timestamp(test_hive_1336,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1336
+ ,cast(from_unixtime(unix_timestamp(test_hive_1335,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1335
+ ,cast(test_hive_1342 as string) as test_hive_1342
+ ,cast(test_hive_1341 as string) as test_hive_1341
+ ,cast(test_hive_1340 as string) as test_hive_1340
+ ,cast(from_unixtime(unix_timestamp(test_hive_1343,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1343
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1347
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1347
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1349
+POSTHOOK: query: create view if not exists test_hive_1349
+as
+select
+ cast(test_hive_1338 as int) as test_hive_1338
+ ,cast(test_hive_1334 as int) as test_hive_1334
+ ,cast(test_hive_1339 as int) as test_hive_1339
+ ,cast(test_hive_336 as string) as test_hive_336
+ ,cast(test_hive_1337 as string) as test_hive_1337
+ ,cast(from_unixtime(unix_timestamp(test_hive_1336,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1336
+ ,cast(from_unixtime(unix_timestamp(test_hive_1335,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1335
+ ,cast(test_hive_1342 as string) as test_hive_1342
+ ,cast(test_hive_1341 as string) as test_hive_1341
+ ,cast(test_hive_1340 as string) as test_hive_1340
+ ,cast(from_unixtime(unix_timestamp(test_hive_1343,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1343
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1347
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1347
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1349
+POSTHOOK: Lineage: test_hive_1349.creation_date EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1349.ds EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1349.ds_ts SIMPLE [(test_hive_1347)test_hive_1347.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1349.source_file_name SIMPLE [(test_hive_1347)test_hive_1347.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1349.test_hive_1334 EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1334, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1349.test_hive_1335 EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1335, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1349.test_hive_1336 EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1336, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1349.test_hive_1337 SIMPLE [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1337, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1349.test_hive_1338 EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1338, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1349.test_hive_1339 EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1339, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1349.test_hive_1340 SIMPLE [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1340, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1349.test_hive_1341 SIMPLE [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1341, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1349.test_hive_1342 SIMPLE [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1342, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1349.test_hive_1343 EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1343, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1349.test_hive_336 SIMPLE [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_336, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1349.ts EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1348
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1348
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1348
+as
+select
+ test_hive_1338 as test_hive_1338
+ ,test_hive_1334 as test_hive_1334
+ ,test_hive_1339 as test_hive_1339
+ ,test_hive_336 as test_hive_336
+ ,test_hive_1337 as test_hive_1337
+ ,test_hive_1336 as test_hive_1336
+ ,test_hive_1335 as test_hive_1335
+ ,test_hive_1342 as test_hive_1342
+ ,test_hive_1341 as test_hive_1341
+ ,test_hive_1340 as test_hive_1340
+ ,test_hive_1343 as test_hive_1343
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1349 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1347
+PREHOOK: Input: default@test_hive_1349
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1348
+POSTHOOK: query: create view test_hive_1348
+as
+select
+ test_hive_1338 as test_hive_1338
+ ,test_hive_1334 as test_hive_1334
+ ,test_hive_1339 as test_hive_1339
+ ,test_hive_336 as test_hive_336
+ ,test_hive_1337 as test_hive_1337
+ ,test_hive_1336 as test_hive_1336
+ ,test_hive_1335 as test_hive_1335
+ ,test_hive_1342 as test_hive_1342
+ ,test_hive_1341 as test_hive_1341
+ ,test_hive_1340 as test_hive_1340
+ ,test_hive_1343 as test_hive_1343
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1349 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1347
+POSTHOOK: Input: default@test_hive_1349
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1348
+POSTHOOK: Lineage: test_hive_1348.creation_date EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1348.ds EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1348.ds_ts SIMPLE [(test_hive_1347)test_hive_1347.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1348.source_file_name SIMPLE [(test_hive_1347)test_hive_1347.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1348.test_hive_1334 EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1334, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1348.test_hive_1335 EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1335, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1348.test_hive_1336 EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1336, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1348.test_hive_1337 SIMPLE [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1337, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1348.test_hive_1338 EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1338, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1348.test_hive_1339 EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1339, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1348.test_hive_1340 SIMPLE [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1340, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1348.test_hive_1341 SIMPLE [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1341, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1348.test_hive_1342 SIMPLE [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1342, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1348.test_hive_1343 EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1343, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1348.test_hive_336 SIMPLE [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_336, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1348.ts EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1345
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1345
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1345
+as
+select t1.*
+from test_hive_1348 t1
+inner join test_hive_1346 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1346
+PREHOOK: Input: default@test_hive_1347
+PREHOOK: Input: default@test_hive_1348
+PREHOOK: Input: default@test_hive_1349
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1345
+POSTHOOK: query: create view test_hive_1345
+as
+select t1.*
+from test_hive_1348 t1
+inner join test_hive_1346 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1346
+POSTHOOK: Input: default@test_hive_1347
+POSTHOOK: Input: default@test_hive_1348
+POSTHOOK: Input: default@test_hive_1349
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1345
+POSTHOOK: Lineage: test_hive_1345.creation_date EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1345.ds EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1345.ds_ts SIMPLE [(test_hive_1347)test_hive_1347.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1345.source_file_name SIMPLE [(test_hive_1347)test_hive_1347.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1345.test_hive_1334 EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1334, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1345.test_hive_1335 EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1335, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1345.test_hive_1336 EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1336, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1345.test_hive_1337 SIMPLE [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1337, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1345.test_hive_1338 EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1338, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1345.test_hive_1339 EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1339, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1345.test_hive_1340 SIMPLE [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1340, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1345.test_hive_1341 SIMPLE [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1341, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1345.test_hive_1342 SIMPLE [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1342, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1345.test_hive_1343 EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_1343, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1345.test_hive_336 SIMPLE [(test_hive_1347)test_hive_1347.FieldSchema(name:test_hive_336, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1345.ts EXPRESSION [(test_hive_1347)test_hive_1347.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1360 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1360 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1360
+(
+ test_hive_1354 string
+ ,test_hive_1350 string
+ ,test_hive_1355 string
+ ,test_hive_337 string
+ ,test_hive_1353 string
+ ,test_hive_1352 string
+ ,test_hive_1351 string
+ ,test_hive_1358 string
+ ,test_hive_1357 string
+ ,test_hive_1356 string
+ ,test_hive_1359 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1360
+POSTHOOK: query: create table test_hive_1360
+(
+ test_hive_1354 string
+ ,test_hive_1350 string
+ ,test_hive_1355 string
+ ,test_hive_337 string
+ ,test_hive_1353 string
+ ,test_hive_1352 string
+ ,test_hive_1351 string
+ ,test_hive_1358 string
+ ,test_hive_1357 string
+ ,test_hive_1356 string
+ ,test_hive_1359 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1360
+PREHOOK: query: create table if not exists test_hive_1363
+(
+ test_hive_1354 string
+ ,test_hive_1350 string
+ ,test_hive_1355 string
+ ,test_hive_337 string
+ ,test_hive_1353 string
+ ,test_hive_1352 string
+ ,test_hive_1351 string
+ ,test_hive_1358 string
+ ,test_hive_1357 string
+ ,test_hive_1356 string
+ ,test_hive_1359 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1363
+POSTHOOK: query: create table if not exists test_hive_1363
+(
+ test_hive_1354 string
+ ,test_hive_1350 string
+ ,test_hive_1355 string
+ ,test_hive_337 string
+ ,test_hive_1353 string
+ ,test_hive_1352 string
+ ,test_hive_1351 string
+ ,test_hive_1358 string
+ ,test_hive_1357 string
+ ,test_hive_1356 string
+ ,test_hive_1359 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1363
+PREHOOK: query: drop table if exists test_hive_1362 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1362 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1362
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1362
+POSTHOOK: query: create table if not exists test_hive_1362
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1362
+PREHOOK: query: drop view if exists test_hive_1365
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1365
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1365
+as
+select
+ cast(test_hive_1354 as int) as test_hive_1354
+ ,cast(test_hive_1350 as int) as test_hive_1350
+ ,cast(test_hive_1355 as int) as test_hive_1355
+ ,cast(test_hive_337 as string) as test_hive_337
+ ,cast(test_hive_1353 as string) as test_hive_1353
+ ,cast(from_unixtime(unix_timestamp(test_hive_1352,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1352
+ ,cast(from_unixtime(unix_timestamp(test_hive_1351,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1351
+ ,cast(test_hive_1358 as string) as test_hive_1358
+ ,cast(test_hive_1357 as string) as test_hive_1357
+ ,cast(test_hive_1356 as string) as test_hive_1356
+ ,cast(from_unixtime(unix_timestamp(test_hive_1359,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1359
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1363
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1363
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1365
+POSTHOOK: query: create view if not exists test_hive_1365
+as
+select
+ cast(test_hive_1354 as int) as test_hive_1354
+ ,cast(test_hive_1350 as int) as test_hive_1350
+ ,cast(test_hive_1355 as int) as test_hive_1355
+ ,cast(test_hive_337 as string) as test_hive_337
+ ,cast(test_hive_1353 as string) as test_hive_1353
+ ,cast(from_unixtime(unix_timestamp(test_hive_1352,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1352
+ ,cast(from_unixtime(unix_timestamp(test_hive_1351,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1351
+ ,cast(test_hive_1358 as string) as test_hive_1358
+ ,cast(test_hive_1357 as string) as test_hive_1357
+ ,cast(test_hive_1356 as string) as test_hive_1356
+ ,cast(from_unixtime(unix_timestamp(test_hive_1359,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1359
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1363
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1363
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1365
+POSTHOOK: Lineage: test_hive_1365.creation_date EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1365.ds EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1365.ds_ts SIMPLE [(test_hive_1363)test_hive_1363.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1365.source_file_name SIMPLE [(test_hive_1363)test_hive_1363.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1365.test_hive_1350 EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1350, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1365.test_hive_1351 EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1351, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1365.test_hive_1352 EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1352, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1365.test_hive_1353 SIMPLE [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1353, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1365.test_hive_1354 EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1354, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1365.test_hive_1355 EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1355, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1365.test_hive_1356 SIMPLE [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1356, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1365.test_hive_1357 SIMPLE [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1357, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1365.test_hive_1358 SIMPLE [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1358, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1365.test_hive_1359 EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1359, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1365.test_hive_337 SIMPLE [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_337, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1365.ts EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1364 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1364 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1364 +as +select + test_hive_1354 as test_hive_1354 + ,test_hive_1350 as test_hive_1350 + ,test_hive_1355 as test_hive_1355 + ,test_hive_337 as test_hive_337 + ,test_hive_1353 as test_hive_1353 + ,test_hive_1352 as test_hive_1352 + ,test_hive_1351 as test_hive_1351 + ,test_hive_1358 as test_hive_1358 + ,test_hive_1357 as test_hive_1357 + ,test_hive_1356 as test_hive_1356 + ,test_hive_1359 as test_hive_1359 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1365 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1363 +PREHOOK: Input: default@test_hive_1365 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1364 +POSTHOOK: query: create view test_hive_1364 +as +select + test_hive_1354 as test_hive_1354 + ,test_hive_1350 as test_hive_1350 + ,test_hive_1355 as test_hive_1355 + ,test_hive_337 as test_hive_337 + ,test_hive_1353 as test_hive_1353 + ,test_hive_1352 as test_hive_1352 + ,test_hive_1351 as test_hive_1351 + ,test_hive_1358 as test_hive_1358 + ,test_hive_1357 as test_hive_1357 + ,test_hive_1356 as test_hive_1356 + ,test_hive_1359 as test_hive_1359 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1365 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1363 +POSTHOOK: Input: default@test_hive_1365 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1364 +POSTHOOK: Lineage: test_hive_1364.creation_date EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1364.ds EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1364.ds_ts SIMPLE [(test_hive_1363)test_hive_1363.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1364.source_file_name SIMPLE [(test_hive_1363)test_hive_1363.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1364.test_hive_1350 EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1350, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1364.test_hive_1351 EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1351, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1364.test_hive_1352 EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1352, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1364.test_hive_1353 SIMPLE [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1353, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1364.test_hive_1354 EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1354, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1364.test_hive_1355 EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1355, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1364.test_hive_1356 SIMPLE [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1356, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1364.test_hive_1357 SIMPLE [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1357, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1364.test_hive_1358 SIMPLE [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1358, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1364.test_hive_1359 EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1359, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1364.test_hive_337 SIMPLE [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_337, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1364.ts EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1361 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1361 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1361 +as +select t1.* +from test_hive_1364 t1 +inner join test_hive_1362 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1362 +PREHOOK: Input: default@test_hive_1363 +PREHOOK: Input: default@test_hive_1364 +PREHOOK: Input: default@test_hive_1365 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1361 +POSTHOOK: query: create view test_hive_1361 +as +select t1.* +from test_hive_1364 t1 +inner join test_hive_1362 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1362 +POSTHOOK: Input: default@test_hive_1363 +POSTHOOK: Input: default@test_hive_1364 +POSTHOOK: Input: default@test_hive_1365 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1361 +POSTHOOK: Lineage: test_hive_1361.creation_date EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1361.ds EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1361.ds_ts SIMPLE [(test_hive_1363)test_hive_1363.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1361.source_file_name SIMPLE [(test_hive_1363)test_hive_1363.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1361.test_hive_1350 EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1350, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1361.test_hive_1351 EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1351, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1361.test_hive_1352 EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1352, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1361.test_hive_1353 SIMPLE [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1353, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1361.test_hive_1354 EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1354, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1361.test_hive_1355 EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1355, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1361.test_hive_1356 SIMPLE [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1356, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1361.test_hive_1357 SIMPLE [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1357, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1361.test_hive_1358 SIMPLE [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1358, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1361.test_hive_1359 EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_1359, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1361.test_hive_337 SIMPLE [(test_hive_1363)test_hive_1363.FieldSchema(name:test_hive_337, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1361.ts EXPRESSION [(test_hive_1363)test_hive_1363.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1433 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1433 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1433 +( + test_hive_1427 string + ,test_hive_1423 string + ,test_hive_1428 string + ,test_hive_413 string + ,test_hive_1426 string + ,test_hive_1425 string + ,test_hive_1424 string + ,test_hive_1431 string + ,test_hive_1430 string + ,test_hive_1429 string + ,test_hive_1432 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1433 +POSTHOOK: query: create table test_hive_1433 +( + test_hive_1427 string + ,test_hive_1423 string + ,test_hive_1428 string + ,test_hive_413 string + ,test_hive_1426 string + ,test_hive_1425 string + ,test_hive_1424 string + ,test_hive_1431 string + ,test_hive_1430 string + ,test_hive_1429 string + ,test_hive_1432 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1433 +PREHOOK: query: create table if not exists test_hive_1436 +( + test_hive_1427 string + ,test_hive_1423 string + ,test_hive_1428 string + ,test_hive_413 string + ,test_hive_1426 string + ,test_hive_1425 string + ,test_hive_1424 string + ,test_hive_1431 string + ,test_hive_1430 string + ,test_hive_1429 string + ,test_hive_1432 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1436 +POSTHOOK: query: create table if not exists test_hive_1436 +( + test_hive_1427 string + ,test_hive_1423 string + ,test_hive_1428 string + ,test_hive_413 string + ,test_hive_1426 string + ,test_hive_1425 string + ,test_hive_1424 string + ,test_hive_1431 string + ,test_hive_1430 string + ,test_hive_1429 string + ,test_hive_1432 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1436 +PREHOOK: query: drop table if exists test_hive_1435 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1435 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1435 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1435 +POSTHOOK: query: create table if not exists test_hive_1435 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1435 +PREHOOK: query: drop view if exists test_hive_1438 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1438 +POSTHOOK: type: DROPVIEW 
+PREHOOK: query: create view if not exists test_hive_1438 +as +select + cast(test_hive_1427 as int) as test_hive_1427 + ,cast(test_hive_1423 as int) as test_hive_1423 + ,cast(test_hive_1428 as int) as test_hive_1428 + ,cast(test_hive_413 as decimal) as test_hive_413 + ,cast(test_hive_1426 as string) as test_hive_1426 + ,cast(from_unixtime(unix_timestamp(test_hive_1425,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1425 + ,cast(from_unixtime(unix_timestamp(test_hive_1424,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1424 + ,cast(test_hive_1431 as string) as test_hive_1431 + ,cast(test_hive_1430 as string) as test_hive_1430 + ,cast(test_hive_1429 as string) as test_hive_1429 + ,cast(from_unixtime(unix_timestamp(test_hive_1432,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1432 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1436 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1436 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1438 +POSTHOOK: query: create view if not exists test_hive_1438 +as +select + cast(test_hive_1427 as int) as test_hive_1427 + ,cast(test_hive_1423 as int) as test_hive_1423 + ,cast(test_hive_1428 as int) as test_hive_1428 + ,cast(test_hive_413 as decimal) as test_hive_413 + ,cast(test_hive_1426 as string) as test_hive_1426 + ,cast(from_unixtime(unix_timestamp(test_hive_1425,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1425 + ,cast(from_unixtime(unix_timestamp(test_hive_1424,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1424 + ,cast(test_hive_1431 as string) as test_hive_1431 + ,cast(test_hive_1430 as string) as test_hive_1430 + ,cast(test_hive_1429 as string) as test_hive_1429 + ,cast(from_unixtime(unix_timestamp(test_hive_1432,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1432 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1436 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1436 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1438 +POSTHOOK: Lineage: test_hive_1438.creation_date EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1438.ds EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1438.ds_ts SIMPLE [(test_hive_1436)test_hive_1436.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1438.source_file_name SIMPLE [(test_hive_1436)test_hive_1436.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1438.test_hive_1423 EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1423, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1438.test_hive_1424 EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1424, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1438.test_hive_1425 EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1425, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1438.test_hive_1426 SIMPLE [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1426, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1438.test_hive_1427 EXPRESSION 
[(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1427, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1438.test_hive_1428 EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1428, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1438.test_hive_1429 SIMPLE [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1429, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1438.test_hive_1430 SIMPLE [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1430, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1438.test_hive_1431 SIMPLE [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1431, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1438.test_hive_1432 EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1432, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1438.test_hive_413 EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_413, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1438.ts EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1437 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1437 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1437 +as +select + test_hive_1427 as test_hive_1427 + ,test_hive_1423 as test_hive_1423 + ,test_hive_1428 as test_hive_1428 + ,test_hive_413 as test_hive_413 + ,test_hive_1426 as test_hive_1426 + ,test_hive_1425 as test_hive_1425 + ,test_hive_1424 as test_hive_1424 + ,test_hive_1431 as test_hive_1431 + ,test_hive_1430 as test_hive_1430 + ,test_hive_1429 as test_hive_1429 + ,test_hive_1432 as test_hive_1432 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1438 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1436 +PREHOOK: Input: default@test_hive_1438 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1437 +POSTHOOK: query: create view test_hive_1437 +as +select + test_hive_1427 as test_hive_1427 + ,test_hive_1423 as test_hive_1423 + ,test_hive_1428 as test_hive_1428 + ,test_hive_413 as test_hive_413 + ,test_hive_1426 as test_hive_1426 + ,test_hive_1425 as test_hive_1425 + ,test_hive_1424 as test_hive_1424 + ,test_hive_1431 as test_hive_1431 + ,test_hive_1430 as test_hive_1430 + ,test_hive_1429 as test_hive_1429 + ,test_hive_1432 as test_hive_1432 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1438 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1436 +POSTHOOK: Input: default@test_hive_1438 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1437 +POSTHOOK: Lineage: test_hive_1437.creation_date EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1437.ds EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1437.ds_ts SIMPLE [(test_hive_1436)test_hive_1436.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1437.source_file_name SIMPLE [(test_hive_1436)test_hive_1436.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1437.test_hive_1423 EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1423, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1437.test_hive_1424 EXPRESSION 
[(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1424, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1437.test_hive_1425 EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1425, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1437.test_hive_1426 SIMPLE [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1426, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1437.test_hive_1427 EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1427, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1437.test_hive_1428 EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1428, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1437.test_hive_1429 SIMPLE [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1429, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1437.test_hive_1430 SIMPLE [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1430, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1437.test_hive_1431 SIMPLE [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1431, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1437.test_hive_1432 EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1432, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1437.test_hive_413 EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_413, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1437.ts EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1434 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1434 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1434 +as +select t1.* +from test_hive_1437 t1 +inner join test_hive_1435 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1435 +PREHOOK: Input: default@test_hive_1436 +PREHOOK: Input: default@test_hive_1437 +PREHOOK: Input: default@test_hive_1438 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1434 +POSTHOOK: query: create view test_hive_1434 +as +select t1.* +from test_hive_1437 t1 +inner join test_hive_1435 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1435 +POSTHOOK: Input: default@test_hive_1436 +POSTHOOK: Input: default@test_hive_1437 +POSTHOOK: Input: default@test_hive_1438 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1434 +POSTHOOK: Lineage: test_hive_1434.creation_date EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1434.ds EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1434.ds_ts SIMPLE [(test_hive_1436)test_hive_1436.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1434.source_file_name SIMPLE [(test_hive_1436)test_hive_1436.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1434.test_hive_1423 EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1423, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1434.test_hive_1424 EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1424, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1434.test_hive_1425 EXPRESSION 
[(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1425, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1434.test_hive_1426 SIMPLE [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1426, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1434.test_hive_1427 EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1427, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1434.test_hive_1428 EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1428, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1434.test_hive_1429 SIMPLE [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1429, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1434.test_hive_1430 SIMPLE [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1430, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1434.test_hive_1431 SIMPLE [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1431, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1434.test_hive_1432 EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_1432, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1434.test_hive_413 EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:test_hive_413, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1434.ts EXPRESSION [(test_hive_1436)test_hive_1436.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1447 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1447 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1447 +( + test_hive_1441 string + ,test_hive_1439 string + ,test_hive_1442 string + ,test_hive_414 string + ,test_hive_1440 string + ,test_hive_1445 string + ,test_hive_1444 string + ,test_hive_1443 string + ,test_hive_1446 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1447 +POSTHOOK: query: create table test_hive_1447 +( + test_hive_1441 string + ,test_hive_1439 string + ,test_hive_1442 string + ,test_hive_414 string + ,test_hive_1440 string + ,test_hive_1445 string + ,test_hive_1444 string + ,test_hive_1443 string + ,test_hive_1446 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1447 +PREHOOK: query: create table if not exists test_hive_1450 +( + test_hive_1441 string + ,test_hive_1439 string + ,test_hive_1442 string + ,test_hive_414 string + ,test_hive_1440 string + ,test_hive_1445 string + ,test_hive_1444 string + ,test_hive_1443 string + ,test_hive_1446 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1450 +POSTHOOK: query: create table if not exists test_hive_1450 +( + test_hive_1441 string + ,test_hive_1439 string + ,test_hive_1442 string + ,test_hive_414 string + ,test_hive_1440 string + ,test_hive_1445 string + ,test_hive_1444 string + ,test_hive_1443 string + ,test_hive_1446 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) 
+stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1450 +PREHOOK: query: drop table if exists test_hive_1449 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1449 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1449 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1449 +POSTHOOK: query: create table if not exists test_hive_1449 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1449 +PREHOOK: query: drop view if exists test_hive_1452 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1452 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1452 +as +select + cast(test_hive_1441 as int) as test_hive_1441 + ,cast(test_hive_1439 as int) as test_hive_1439 + ,cast(test_hive_1442 as int) as test_hive_1442 + ,cast(test_hive_414 as string) as test_hive_414 + ,cast(test_hive_1440 as string) as test_hive_1440 + ,cast(test_hive_1445 as string) as test_hive_1445 + ,cast(test_hive_1444 as string) as test_hive_1444 + ,cast(test_hive_1443 as string) as test_hive_1443 + ,cast(from_unixtime(unix_timestamp(test_hive_1446,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1446 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1450 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1450 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1452 +POSTHOOK: query: create view if not exists test_hive_1452 +as +select + cast(test_hive_1441 as int) as test_hive_1441 + ,cast(test_hive_1439 as int) as test_hive_1439 + ,cast(test_hive_1442 as int) as test_hive_1442 + ,cast(test_hive_414 as string) as test_hive_414 + ,cast(test_hive_1440 as string) as test_hive_1440 + ,cast(test_hive_1445 as string) as test_hive_1445 + ,cast(test_hive_1444 as string) as test_hive_1444 + ,cast(test_hive_1443 as string) as test_hive_1443 + ,cast(from_unixtime(unix_timestamp(test_hive_1446,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1446 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1450 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1450 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1452 +POSTHOOK: Lineage: test_hive_1452.creation_date EXPRESSION [(test_hive_1450)test_hive_1450.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1452.ds EXPRESSION [(test_hive_1450)test_hive_1450.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1452.ds_ts SIMPLE [(test_hive_1450)test_hive_1450.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1452.source_file_name SIMPLE [(test_hive_1450)test_hive_1450.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1452.test_hive_1439 EXPRESSION [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1439, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1452.test_hive_1440 SIMPLE [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1440, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1452.test_hive_1441 EXPRESSION [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1441, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1452.test_hive_1442 EXPRESSION [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1442, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1452.test_hive_1443 SIMPLE [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1443, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1452.test_hive_1444 SIMPLE [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1444, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1452.test_hive_1445 SIMPLE [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1445, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1452.test_hive_1446 EXPRESSION [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1446, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1452.test_hive_414 SIMPLE [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_414, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1452.ts EXPRESSION [(test_hive_1450)test_hive_1450.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1451 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1451 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1451 +as +select + test_hive_1441 as test_hive_1441 + ,test_hive_1439 as test_hive_1439 + ,test_hive_1442 as test_hive_1442 + ,test_hive_414 as test_hive_414 + ,test_hive_1440 as test_hive_1440 + ,test_hive_1445 as test_hive_1445 + ,test_hive_1444 as test_hive_1444 + ,test_hive_1443 as test_hive_1443 + ,test_hive_1446 as test_hive_1446 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1452 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1450 +PREHOOK: Input: default@test_hive_1452 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1451 +POSTHOOK: query: create view test_hive_1451 +as +select + test_hive_1441 as test_hive_1441 + ,test_hive_1439 as test_hive_1439 + ,test_hive_1442 as test_hive_1442 + ,test_hive_414 as test_hive_414 + ,test_hive_1440 as test_hive_1440 + ,test_hive_1445 as test_hive_1445 + ,test_hive_1444 as test_hive_1444 + ,test_hive_1443 as test_hive_1443 + ,test_hive_1446 as test_hive_1446 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1452 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1450 +POSTHOOK: Input: default@test_hive_1452 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1451 +POSTHOOK: Lineage: test_hive_1451.creation_date EXPRESSION [(test_hive_1450)test_hive_1450.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1451.ds EXPRESSION [(test_hive_1450)test_hive_1450.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1451.ds_ts SIMPLE [(test_hive_1450)test_hive_1450.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1451.source_file_name SIMPLE [(test_hive_1450)test_hive_1450.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1451.test_hive_1439 EXPRESSION [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1439, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1451.test_hive_1440 SIMPLE [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1440, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1451.test_hive_1441 EXPRESSION [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1441, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1451.test_hive_1442 EXPRESSION [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1442, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1451.test_hive_1443 SIMPLE [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1443, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1451.test_hive_1444 SIMPLE [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1444, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1451.test_hive_1445 SIMPLE [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1445, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1451.test_hive_1446 EXPRESSION [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1446, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1451.test_hive_414 SIMPLE [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_414, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1451.ts EXPRESSION [(test_hive_1450)test_hive_1450.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1448 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1448 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1448 +as +select t1.* +from test_hive_1451 t1 +inner join test_hive_1449 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1449 +PREHOOK: Input: default@test_hive_1450 +PREHOOK: Input: default@test_hive_1451 +PREHOOK: Input: default@test_hive_1452 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1448 +POSTHOOK: query: create view test_hive_1448 +as +select t1.* +from test_hive_1451 t1 +inner join test_hive_1449 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1449 +POSTHOOK: Input: default@test_hive_1450 +POSTHOOK: Input: default@test_hive_1451 +POSTHOOK: Input: default@test_hive_1452 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1448 +POSTHOOK: Lineage: test_hive_1448.creation_date EXPRESSION [(test_hive_1450)test_hive_1450.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1448.ds EXPRESSION [(test_hive_1450)test_hive_1450.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1448.ds_ts SIMPLE [(test_hive_1450)test_hive_1450.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1448.source_file_name SIMPLE [(test_hive_1450)test_hive_1450.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1448.test_hive_1439 EXPRESSION [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1439, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1448.test_hive_1440 SIMPLE [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1440, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1448.test_hive_1441 EXPRESSION [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1441, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1448.test_hive_1442 EXPRESSION [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1442, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1448.test_hive_1443 SIMPLE [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1443, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1448.test_hive_1444 SIMPLE [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1444, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1448.test_hive_1445 SIMPLE [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1445, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1448.test_hive_1446 EXPRESSION [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_1446, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1448.test_hive_414 SIMPLE [(test_hive_1450)test_hive_1450.FieldSchema(name:test_hive_414, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1448.ts EXPRESSION [(test_hive_1450)test_hive_1450.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1463 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1463 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1463 +( + test_hive_1457 string + ,test_hive_1453 string + ,test_hive_1458 string + ,test_hive_415 string + ,test_hive_1456 string + ,test_hive_1455 string + ,test_hive_1454 string + ,test_hive_1461 string + ,test_hive_1460 string + ,test_hive_1459 string + ,test_hive_1462 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1463 +POSTHOOK: query: create table test_hive_1463 +( + test_hive_1457 string + ,test_hive_1453 string + ,test_hive_1458 string + ,test_hive_415 string + ,test_hive_1456 string + ,test_hive_1455 string + ,test_hive_1454 string + ,test_hive_1461 string + ,test_hive_1460 string + ,test_hive_1459 string + ,test_hive_1462 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1463 +PREHOOK: query: create table if not exists test_hive_1466 +( + test_hive_1457 string + ,test_hive_1453 string + ,test_hive_1458 string + ,test_hive_415 string + ,test_hive_1456 string + ,test_hive_1455 string + ,test_hive_1454 string + ,test_hive_1461 string + ,test_hive_1460 string + ,test_hive_1459 string + ,test_hive_1462 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1466 +POSTHOOK: query: create table if not exists test_hive_1466 +( + test_hive_1457 string + ,test_hive_1453 string + ,test_hive_1458 string + ,test_hive_415 string + ,test_hive_1456 string + ,test_hive_1455 string + ,test_hive_1454 string + ,test_hive_1461 string + ,test_hive_1460 string + ,test_hive_1459 string + ,test_hive_1462 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1466 +PREHOOK: query: drop table if exists test_hive_1465 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1465 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1465 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1465 +POSTHOOK: query: create table if not exists 
test_hive_1465 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1465 +PREHOOK: query: drop view if exists test_hive_1468 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1468 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1468 +as +select + cast(test_hive_1457 as int) as test_hive_1457 + ,cast(test_hive_1453 as int) as test_hive_1453 + ,cast(test_hive_1458 as int) as test_hive_1458 + ,cast(test_hive_415 as decimal) as test_hive_415 + ,cast(test_hive_1456 as string) as test_hive_1456 + ,cast(from_unixtime(unix_timestamp(test_hive_1455,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1455 + ,cast(from_unixtime(unix_timestamp(test_hive_1454,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1454 + ,cast(test_hive_1461 as string) as test_hive_1461 + ,cast(test_hive_1460 as string) as test_hive_1460 + ,cast(test_hive_1459 as string) as test_hive_1459 + ,cast(from_unixtime(unix_timestamp(test_hive_1462,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1462 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1466 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1466 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1468 +POSTHOOK: query: create view if not exists test_hive_1468 +as +select + cast(test_hive_1457 as int) as test_hive_1457 + ,cast(test_hive_1453 as int) as test_hive_1453 + ,cast(test_hive_1458 as int) as test_hive_1458 + ,cast(test_hive_415 as decimal) as test_hive_415 + ,cast(test_hive_1456 as string) as test_hive_1456 + ,cast(from_unixtime(unix_timestamp(test_hive_1455,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1455 + ,cast(from_unixtime(unix_timestamp(test_hive_1454,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1454 + ,cast(test_hive_1461 as string) as test_hive_1461 + ,cast(test_hive_1460 as string) as test_hive_1460 + ,cast(test_hive_1459 as string) as test_hive_1459 + ,cast(from_unixtime(unix_timestamp(test_hive_1462,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1462 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1466 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1466 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1468 +POSTHOOK: Lineage: test_hive_1468.creation_date EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1468.ds EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1468.ds_ts SIMPLE [(test_hive_1466)test_hive_1466.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1468.source_file_name SIMPLE [(test_hive_1466)test_hive_1466.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1468.test_hive_1453 EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1453, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1468.test_hive_1454 EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1454, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1468.test_hive_1455 EXPRESSION 
[(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1455, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1468.test_hive_1456 SIMPLE [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1456, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1468.test_hive_1457 EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1457, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1468.test_hive_1458 EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1458, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1468.test_hive_1459 SIMPLE [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1459, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1468.test_hive_1460 SIMPLE [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1460, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1468.test_hive_1461 SIMPLE [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1461, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1468.test_hive_1462 EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1462, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1468.test_hive_415 EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_415, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1468.ts EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1467 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1467 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1467 +as +select + test_hive_1457 as test_hive_1457 + ,test_hive_1453 as test_hive_1453 + ,test_hive_1458 as test_hive_1458 + ,test_hive_415 as test_hive_415 + ,test_hive_1456 as test_hive_1456 + ,test_hive_1455 as test_hive_1455 + ,test_hive_1454 as test_hive_1454 + ,test_hive_1461 as test_hive_1461 + ,test_hive_1460 as test_hive_1460 + ,test_hive_1459 as test_hive_1459 + ,test_hive_1462 as test_hive_1462 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1468 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1466 +PREHOOK: Input: default@test_hive_1468 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1467 +POSTHOOK: query: create view test_hive_1467 +as +select + test_hive_1457 as test_hive_1457 + ,test_hive_1453 as test_hive_1453 + ,test_hive_1458 as test_hive_1458 + ,test_hive_415 as test_hive_415 + ,test_hive_1456 as test_hive_1456 + ,test_hive_1455 as test_hive_1455 + ,test_hive_1454 as test_hive_1454 + ,test_hive_1461 as test_hive_1461 + ,test_hive_1460 as test_hive_1460 + ,test_hive_1459 as test_hive_1459 + ,test_hive_1462 as test_hive_1462 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1468 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1466 +POSTHOOK: Input: default@test_hive_1468 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1467 +POSTHOOK: Lineage: test_hive_1467.creation_date EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1467.ds EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1467.ds_ts SIMPLE [(test_hive_1466)test_hive_1466.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1467.source_file_name SIMPLE 
[(test_hive_1466)test_hive_1466.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1467.test_hive_1453 EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1453, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1467.test_hive_1454 EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1454, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1467.test_hive_1455 EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1455, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1467.test_hive_1456 SIMPLE [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1456, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1467.test_hive_1457 EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1457, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1467.test_hive_1458 EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1458, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1467.test_hive_1459 SIMPLE [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1459, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1467.test_hive_1460 SIMPLE [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1460, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1467.test_hive_1461 SIMPLE [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1461, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1467.test_hive_1462 EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1462, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1467.test_hive_415 EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_415, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1467.ts EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1464 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1464 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1464 +as +select t1.* +from test_hive_1467 t1 +inner join test_hive_1465 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1465 +PREHOOK: Input: default@test_hive_1466 +PREHOOK: Input: default@test_hive_1467 +PREHOOK: Input: default@test_hive_1468 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1464 +POSTHOOK: query: create view test_hive_1464 +as +select t1.* +from test_hive_1467 t1 +inner join test_hive_1465 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1465 +POSTHOOK: Input: default@test_hive_1466 +POSTHOOK: Input: default@test_hive_1467 +POSTHOOK: Input: default@test_hive_1468 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1464 +POSTHOOK: Lineage: test_hive_1464.creation_date EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1464.ds EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1464.ds_ts SIMPLE [(test_hive_1466)test_hive_1466.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1464.source_file_name SIMPLE [(test_hive_1466)test_hive_1466.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1464.test_hive_1453 EXPRESSION 
[(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1453, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1464.test_hive_1454 EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1454, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1464.test_hive_1455 EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1455, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1464.test_hive_1456 SIMPLE [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1456, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1464.test_hive_1457 EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1457, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1464.test_hive_1458 EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1458, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1464.test_hive_1459 SIMPLE [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1459, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1464.test_hive_1460 SIMPLE [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1460, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1464.test_hive_1461 SIMPLE [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1461, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1464.test_hive_1462 EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_1462, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1464.test_hive_415 EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:test_hive_415, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1464.ts EXPRESSION [(test_hive_1466)test_hive_1466.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1477 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1477 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1477 +( + test_hive_1471 string + ,test_hive_1469 string + ,test_hive_1472 string + ,test_hive_416 string + ,test_hive_1470 string + ,test_hive_1475 string + ,test_hive_1474 string + ,test_hive_1473 string + ,test_hive_1476 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1477 +POSTHOOK: query: create table test_hive_1477 +( + test_hive_1471 string + ,test_hive_1469 string + ,test_hive_1472 string + ,test_hive_416 string + ,test_hive_1470 string + ,test_hive_1475 string + ,test_hive_1474 string + ,test_hive_1473 string + ,test_hive_1476 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1477 +PREHOOK: query: create table if not exists test_hive_1480 +( + test_hive_1471 string + ,test_hive_1469 string + ,test_hive_1472 string + ,test_hive_416 string + ,test_hive_1470 string + ,test_hive_1475 string + ,test_hive_1474 string + ,test_hive_1473 string + ,test_hive_1476 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1480 +POSTHOOK: query: create table if not exists test_hive_1480 +( + test_hive_1471 
string
+ ,test_hive_1469 string
+ ,test_hive_1472 string
+ ,test_hive_416 string
+ ,test_hive_1470 string
+ ,test_hive_1475 string
+ ,test_hive_1474 string
+ ,test_hive_1473 string
+ ,test_hive_1476 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1480
+PREHOOK: query: drop table if exists test_hive_1479 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1479 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1479
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1479
+POSTHOOK: query: create table if not exists test_hive_1479
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1479
+PREHOOK: query: drop view if exists test_hive_1482
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1482
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1482
+as
+select
+ cast(test_hive_1471 as int) as test_hive_1471
+ ,cast(test_hive_1469 as int) as test_hive_1469
+ ,cast(test_hive_1472 as int) as test_hive_1472
+ ,cast(test_hive_416 as string) as test_hive_416
+ ,cast(test_hive_1470 as string) as test_hive_1470
+ ,cast(test_hive_1475 as string) as test_hive_1475
+ ,cast(test_hive_1474 as string) as test_hive_1474
+ ,cast(test_hive_1473 as string) as test_hive_1473
+ ,cast(from_unixtime(unix_timestamp(test_hive_1476,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1476
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1480
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1480
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1482
+POSTHOOK: query: create view if not exists test_hive_1482
+as
+select
+ cast(test_hive_1471 as int) as test_hive_1471
+ ,cast(test_hive_1469 as int) as test_hive_1469
+ ,cast(test_hive_1472 as int) as test_hive_1472
+ ,cast(test_hive_416 as string) as test_hive_416
+ ,cast(test_hive_1470 as string) as test_hive_1470
+ ,cast(test_hive_1475 as string) as test_hive_1475
+ ,cast(test_hive_1474 as string) as test_hive_1474
+ ,cast(test_hive_1473 as string) as test_hive_1473
+ ,cast(from_unixtime(unix_timestamp(test_hive_1476,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1476
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1480
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1480
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1482
+POSTHOOK: Lineage: test_hive_1482.creation_date EXPRESSION [(test_hive_1480)test_hive_1480.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1482.ds EXPRESSION [(test_hive_1480)test_hive_1480.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1482.ds_ts SIMPLE [(test_hive_1480)test_hive_1480.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1482.source_file_name SIMPLE [(test_hive_1480)test_hive_1480.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1482.test_hive_1469 EXPRESSION [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1469, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1482.test_hive_1470 SIMPLE [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1470, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1482.test_hive_1471 EXPRESSION [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1471, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1482.test_hive_1472 EXPRESSION [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1472, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1482.test_hive_1473 SIMPLE [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1473, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1482.test_hive_1474 SIMPLE [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1474, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1482.test_hive_1475 SIMPLE [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1475, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1482.test_hive_1476 EXPRESSION [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1476, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1482.test_hive_416 SIMPLE [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_416, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1482.ts EXPRESSION [(test_hive_1480)test_hive_1480.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1481
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1481
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1481
+as
+select
+ test_hive_1471 as test_hive_1471
+ ,test_hive_1469 as test_hive_1469
+ ,test_hive_1472 as test_hive_1472
+ ,test_hive_416 as test_hive_416
+ ,test_hive_1470 as test_hive_1470
+ ,test_hive_1475 as test_hive_1475
+ ,test_hive_1474 as test_hive_1474
+ ,test_hive_1473 as test_hive_1473
+ ,test_hive_1476 as test_hive_1476
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1482 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1480
+PREHOOK: Input: default@test_hive_1482
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1481
+POSTHOOK: query: create view test_hive_1481
+as
+select
+ test_hive_1471 as test_hive_1471
+ ,test_hive_1469 as test_hive_1469
+ ,test_hive_1472 as test_hive_1472
+ ,test_hive_416 as test_hive_416
+ ,test_hive_1470 as test_hive_1470
+ ,test_hive_1475 as test_hive_1475
+ ,test_hive_1474 as test_hive_1474
+ ,test_hive_1473 as test_hive_1473
+ ,test_hive_1476 as test_hive_1476
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1482 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1480
+POSTHOOK: Input: default@test_hive_1482
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1481
+POSTHOOK: Lineage: test_hive_1481.creation_date EXPRESSION [(test_hive_1480)test_hive_1480.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1481.ds EXPRESSION [(test_hive_1480)test_hive_1480.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1481.ds_ts SIMPLE [(test_hive_1480)test_hive_1480.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1481.source_file_name SIMPLE [(test_hive_1480)test_hive_1480.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1481.test_hive_1469 EXPRESSION [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1469, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1481.test_hive_1470 SIMPLE [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1470, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1481.test_hive_1471 EXPRESSION [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1471, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1481.test_hive_1472 EXPRESSION [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1472, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1481.test_hive_1473 SIMPLE [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1473, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1481.test_hive_1474 SIMPLE [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1474, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1481.test_hive_1475 SIMPLE [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1475, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1481.test_hive_1476 EXPRESSION [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1476, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1481.test_hive_416 SIMPLE [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_416, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1481.ts EXPRESSION [(test_hive_1480)test_hive_1480.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1478
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1478
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1478
+as
+select t1.*
+from test_hive_1481 t1
+inner join test_hive_1479 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1479
+PREHOOK: Input: default@test_hive_1480
+PREHOOK: Input: default@test_hive_1481
+PREHOOK: Input: default@test_hive_1482
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1478
+POSTHOOK: query: create view test_hive_1478
+as
+select t1.*
+from test_hive_1481 t1
+inner join test_hive_1479 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1479
+POSTHOOK: Input: default@test_hive_1480
+POSTHOOK: Input: default@test_hive_1481
+POSTHOOK: Input: default@test_hive_1482
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1478
+POSTHOOK: Lineage: test_hive_1478.creation_date EXPRESSION [(test_hive_1480)test_hive_1480.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1478.ds EXPRESSION [(test_hive_1480)test_hive_1480.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1478.ds_ts SIMPLE [(test_hive_1480)test_hive_1480.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1478.source_file_name SIMPLE [(test_hive_1480)test_hive_1480.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1478.test_hive_1469 EXPRESSION [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1469, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1478.test_hive_1470 SIMPLE [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1470, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1478.test_hive_1471 EXPRESSION [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1471, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1478.test_hive_1472 EXPRESSION [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1472, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1478.test_hive_1473 SIMPLE [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1473, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1478.test_hive_1474 SIMPLE [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1474, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1478.test_hive_1475 SIMPLE [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1475, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1478.test_hive_1476 EXPRESSION [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_1476, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1478.test_hive_416 SIMPLE [(test_hive_1480)test_hive_1480.FieldSchema(name:test_hive_416, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1478.ts EXPRESSION [(test_hive_1480)test_hive_1480.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1491 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1491 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1491
+(
+ test_hive_1485 string
+ ,test_hive_1483 string
+ ,test_hive_1486 string
+ ,test_hive_417 string
+ ,test_hive_1484 string
+ ,test_hive_1489 string
+ ,test_hive_1488 string
+ ,test_hive_1487 string
+ ,test_hive_1490 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1491
+POSTHOOK: query: create table test_hive_1491
+(
+ test_hive_1485 string
+ ,test_hive_1483 string
+ ,test_hive_1486 string
+ ,test_hive_417 string
+ ,test_hive_1484 string
+ ,test_hive_1489 string
+ ,test_hive_1488 string
+ ,test_hive_1487 string
+ ,test_hive_1490 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1491
+PREHOOK: query: create table if not exists test_hive_1494
+(
+ test_hive_1485 string
+ ,test_hive_1483 string
+ ,test_hive_1486 string
+ ,test_hive_417 string
+ ,test_hive_1484 string
+ ,test_hive_1489 string
+ ,test_hive_1488 string
+ ,test_hive_1487 string
+ ,test_hive_1490 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1494
+POSTHOOK: query: create table if not exists test_hive_1494
+(
+ test_hive_1485 string
+ ,test_hive_1483 string
+ ,test_hive_1486 string
+ ,test_hive_417 string
+ ,test_hive_1484 string
+ ,test_hive_1489 string
+ ,test_hive_1488 string
+ ,test_hive_1487 string
+ ,test_hive_1490 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1494
+PREHOOK: query: drop table if exists test_hive_1493 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1493 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1493
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1493
+POSTHOOK: query: create table if not exists test_hive_1493
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1493
+PREHOOK: query: drop view if exists test_hive_1496
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1496
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1496
+as
+select
+ cast(test_hive_1485 as int) as test_hive_1485
+ ,cast(test_hive_1483 as int) as test_hive_1483
+ ,cast(test_hive_1486 as int) as test_hive_1486
+ ,cast(test_hive_417 as string) as test_hive_417
+ ,cast(test_hive_1484 as string) as test_hive_1484
+ ,cast(test_hive_1489 as string) as test_hive_1489
+ ,cast(test_hive_1488 as string) as test_hive_1488
+ ,cast(test_hive_1487 as string) as test_hive_1487
+ ,cast(from_unixtime(unix_timestamp(test_hive_1490,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1490
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1494
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1494
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1496
+POSTHOOK: query: create view if not exists test_hive_1496
+as
+select
+ cast(test_hive_1485 as int) as test_hive_1485
+ ,cast(test_hive_1483 as int) as test_hive_1483
+ ,cast(test_hive_1486 as int) as test_hive_1486
+ ,cast(test_hive_417 as string) as test_hive_417
+ ,cast(test_hive_1484 as string) as test_hive_1484
+ ,cast(test_hive_1489 as string) as test_hive_1489
+ ,cast(test_hive_1488 as string) as test_hive_1488
+ ,cast(test_hive_1487 as string) as test_hive_1487
+ ,cast(from_unixtime(unix_timestamp(test_hive_1490,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1490
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1494
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1494
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1496
+POSTHOOK: Lineage: test_hive_1496.creation_date EXPRESSION [(test_hive_1494)test_hive_1494.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1496.ds EXPRESSION [(test_hive_1494)test_hive_1494.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1496.ds_ts SIMPLE [(test_hive_1494)test_hive_1494.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1496.source_file_name SIMPLE [(test_hive_1494)test_hive_1494.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1496.test_hive_1483 EXPRESSION [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1483, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1496.test_hive_1484 SIMPLE [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1484, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1496.test_hive_1485 EXPRESSION [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1485, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1496.test_hive_1486 EXPRESSION [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1486, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1496.test_hive_1487 SIMPLE [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1487, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1496.test_hive_1488 SIMPLE [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1488, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1496.test_hive_1489 SIMPLE [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1489, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1496.test_hive_1490 EXPRESSION [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1490, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1496.test_hive_417 SIMPLE [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_417, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1496.ts EXPRESSION [(test_hive_1494)test_hive_1494.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1495
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1495
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1495
+as
+select
+ test_hive_1485 as test_hive_1485
+ ,test_hive_1483 as test_hive_1483
+ ,test_hive_1486 as test_hive_1486
+ ,test_hive_417 as test_hive_417
+ ,test_hive_1484 as test_hive_1484
+ ,test_hive_1489 as test_hive_1489
+ ,test_hive_1488 as test_hive_1488
+ ,test_hive_1487 as test_hive_1487
+ ,test_hive_1490 as test_hive_1490
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1496 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1494
+PREHOOK: Input: default@test_hive_1496
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1495
+POSTHOOK: query: create view test_hive_1495
+as
+select
+ test_hive_1485 as test_hive_1485
+ ,test_hive_1483 as test_hive_1483
+ ,test_hive_1486 as test_hive_1486
+ ,test_hive_417 as test_hive_417
+ ,test_hive_1484 as test_hive_1484
+ ,test_hive_1489 as test_hive_1489
+ ,test_hive_1488 as test_hive_1488
+ ,test_hive_1487 as test_hive_1487
+ ,test_hive_1490 as test_hive_1490
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1496 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1494
+POSTHOOK: Input: default@test_hive_1496
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1495
+POSTHOOK: Lineage: test_hive_1495.creation_date EXPRESSION [(test_hive_1494)test_hive_1494.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1495.ds EXPRESSION [(test_hive_1494)test_hive_1494.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1495.ds_ts SIMPLE [(test_hive_1494)test_hive_1494.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1495.source_file_name SIMPLE [(test_hive_1494)test_hive_1494.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1495.test_hive_1483 EXPRESSION [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1483, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1495.test_hive_1484 SIMPLE [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1484, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1495.test_hive_1485 EXPRESSION [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1485, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1495.test_hive_1486 EXPRESSION [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1486, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1495.test_hive_1487 SIMPLE [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1487, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1495.test_hive_1488 SIMPLE [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1488, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1495.test_hive_1489 SIMPLE [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1489, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1495.test_hive_1490 EXPRESSION [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1490, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1495.test_hive_417 SIMPLE [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_417, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1495.ts EXPRESSION [(test_hive_1494)test_hive_1494.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1492
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1492
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1492
+as
+select t1.*
+from test_hive_1495 t1
+inner join test_hive_1493 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1493
+PREHOOK: Input: default@test_hive_1494
+PREHOOK: Input: default@test_hive_1495
+PREHOOK: Input: default@test_hive_1496
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1492
+POSTHOOK: query: create view test_hive_1492
+as
+select t1.*
+from test_hive_1495 t1
+inner join test_hive_1493 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1493
+POSTHOOK: Input: default@test_hive_1494
+POSTHOOK: Input: default@test_hive_1495
+POSTHOOK: Input: default@test_hive_1496
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1492
+POSTHOOK: Lineage: test_hive_1492.creation_date EXPRESSION [(test_hive_1494)test_hive_1494.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1492.ds EXPRESSION [(test_hive_1494)test_hive_1494.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1492.ds_ts SIMPLE [(test_hive_1494)test_hive_1494.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1492.source_file_name SIMPLE [(test_hive_1494)test_hive_1494.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1492.test_hive_1483 EXPRESSION [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1483, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1492.test_hive_1484 SIMPLE [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1484, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1492.test_hive_1485 EXPRESSION [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1485, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1492.test_hive_1486 EXPRESSION [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1486, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1492.test_hive_1487 SIMPLE [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1487, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1492.test_hive_1488 SIMPLE [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1488, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1492.test_hive_1489 SIMPLE [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1489, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1492.test_hive_1490 EXPRESSION [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_1490, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1492.test_hive_417 SIMPLE [(test_hive_1494)test_hive_1494.FieldSchema(name:test_hive_417, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1492.ts EXPRESSION [(test_hive_1494)test_hive_1494.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1507 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1507 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1507
+(
+ test_hive_1501 string
+ ,test_hive_1497 string
+ ,test_hive_1502 string
+ ,test_hive_418 string
+ ,test_hive_1500 string
+ ,test_hive_1499 string
+ ,test_hive_1498 string
+ ,test_hive_1505 string
+ ,test_hive_1504 string
+ ,test_hive_1503 string
+ ,test_hive_1506 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1507
+POSTHOOK: query: create table test_hive_1507
+(
+ test_hive_1501 string
+ ,test_hive_1497 string
+ ,test_hive_1502 string
+ ,test_hive_418 string
+ ,test_hive_1500 string
+ ,test_hive_1499 string
+ ,test_hive_1498 string
+ ,test_hive_1505 string
+ ,test_hive_1504 string
+ ,test_hive_1503 string
+ ,test_hive_1506 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1507
+PREHOOK: query: create table if not exists test_hive_1510
+(
+ test_hive_1501 string
+ ,test_hive_1497 string
+ ,test_hive_1502 string
+ ,test_hive_418 string
+ ,test_hive_1500 string
+ ,test_hive_1499 string
+ ,test_hive_1498 string
+ ,test_hive_1505 string
+ ,test_hive_1504 string
+ ,test_hive_1503 string
+ ,test_hive_1506 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1510
+POSTHOOK: query: create table if not exists test_hive_1510
+(
+ test_hive_1501 string
+ ,test_hive_1497 string
+ ,test_hive_1502 string
+ ,test_hive_418 string
+ ,test_hive_1500 string
+ ,test_hive_1499 string
+ ,test_hive_1498 string
+ ,test_hive_1505 string
+ ,test_hive_1504 string
+ ,test_hive_1503 string
+ ,test_hive_1506 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1510
+PREHOOK: query: drop table if exists test_hive_1509 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1509 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1509
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1509
+POSTHOOK: query: create table if not exists test_hive_1509
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1509
+PREHOOK: query: drop view if exists test_hive_1512
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1512
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1512
+as
+select
+ cast(test_hive_1501 as int) as test_hive_1501
+ ,cast(test_hive_1497 as int) as test_hive_1497
+ ,cast(test_hive_1502 as int) as test_hive_1502
+ ,cast(test_hive_418 as decimal) as test_hive_418
+ ,cast(test_hive_1500 as string) as test_hive_1500
+ ,cast(from_unixtime(unix_timestamp(test_hive_1499,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1499
+ ,cast(from_unixtime(unix_timestamp(test_hive_1498,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1498
+ ,cast(test_hive_1505 as string) as test_hive_1505
+ ,cast(test_hive_1504 as string) as test_hive_1504
+ ,cast(test_hive_1503 as string) as test_hive_1503
+ ,cast(from_unixtime(unix_timestamp(test_hive_1506,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1506
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1510
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1510
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1512
+POSTHOOK: query: create view if not exists test_hive_1512
+as
+select
+ cast(test_hive_1501 as int) as test_hive_1501
+ ,cast(test_hive_1497 as int) as test_hive_1497
+ ,cast(test_hive_1502 as int) as test_hive_1502
+ ,cast(test_hive_418 as decimal) as test_hive_418
+ ,cast(test_hive_1500 as string) as test_hive_1500
+ ,cast(from_unixtime(unix_timestamp(test_hive_1499,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1499
+ ,cast(from_unixtime(unix_timestamp(test_hive_1498,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1498
+ ,cast(test_hive_1505 as string) as test_hive_1505
+ ,cast(test_hive_1504 as string) as test_hive_1504
+ ,cast(test_hive_1503 as string) as test_hive_1503
+ ,cast(from_unixtime(unix_timestamp(test_hive_1506,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1506
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1510
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1510
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1512
+POSTHOOK: Lineage: test_hive_1512.creation_date EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1512.ds EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1512.ds_ts SIMPLE [(test_hive_1510)test_hive_1510.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1512.source_file_name SIMPLE [(test_hive_1510)test_hive_1510.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1512.test_hive_1497 EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1497, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1512.test_hive_1498 EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1498, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1512.test_hive_1499 EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1499, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1512.test_hive_1500 SIMPLE [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1500, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1512.test_hive_1501 EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1501, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1512.test_hive_1502 EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1502, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1512.test_hive_1503 SIMPLE [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1503, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1512.test_hive_1504 SIMPLE [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1504, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1512.test_hive_1505 SIMPLE [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1505, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1512.test_hive_1506 EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1506, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1512.test_hive_418 EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_418, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1512.ts EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1511
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1511
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1511
+as
+select
+ test_hive_1501 as test_hive_1501
+ ,test_hive_1497 as test_hive_1497
+ ,test_hive_1502 as test_hive_1502
+ ,test_hive_418 as test_hive_418
+ ,test_hive_1500 as test_hive_1500
+ ,test_hive_1499 as test_hive_1499
+ ,test_hive_1498 as test_hive_1498
+ ,test_hive_1505 as test_hive_1505
+ ,test_hive_1504 as test_hive_1504
+ ,test_hive_1503 as test_hive_1503
+ ,test_hive_1506 as test_hive_1506
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1512 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1510
+PREHOOK: Input: default@test_hive_1512
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1511
+POSTHOOK: query: create view test_hive_1511
+as
+select
+ test_hive_1501 as test_hive_1501
+ ,test_hive_1497 as test_hive_1497
+ ,test_hive_1502 as test_hive_1502
+ ,test_hive_418 as test_hive_418
+ ,test_hive_1500 as test_hive_1500
+ ,test_hive_1499 as test_hive_1499
+ ,test_hive_1498 as test_hive_1498
+ ,test_hive_1505 as test_hive_1505
+ ,test_hive_1504 as test_hive_1504
+ ,test_hive_1503 as test_hive_1503
+ ,test_hive_1506 as test_hive_1506
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1512 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1510
+POSTHOOK: Input: default@test_hive_1512
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1511
+POSTHOOK: Lineage: test_hive_1511.creation_date EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1511.ds EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1511.ds_ts SIMPLE [(test_hive_1510)test_hive_1510.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1511.source_file_name SIMPLE [(test_hive_1510)test_hive_1510.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1511.test_hive_1497 EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1497, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1511.test_hive_1498 EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1498, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1511.test_hive_1499 EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1499, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1511.test_hive_1500 SIMPLE [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1500, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1511.test_hive_1501 EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1501, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1511.test_hive_1502 EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1502, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1511.test_hive_1503 SIMPLE [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1503, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1511.test_hive_1504 SIMPLE [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1504, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1511.test_hive_1505 SIMPLE [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1505, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1511.test_hive_1506 EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1506, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1511.test_hive_418 EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_418, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1511.ts EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1508
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1508
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1508
+as
+select t1.*
+from test_hive_1511 t1
+inner join test_hive_1509 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1509
+PREHOOK: Input: default@test_hive_1510
+PREHOOK: Input: default@test_hive_1511
+PREHOOK: Input: default@test_hive_1512
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1508
+POSTHOOK: query: create view test_hive_1508
+as
+select t1.*
+from test_hive_1511 t1
+inner join test_hive_1509 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1509
+POSTHOOK: Input: default@test_hive_1510
+POSTHOOK: Input: default@test_hive_1511
+POSTHOOK: Input: default@test_hive_1512
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1508
+POSTHOOK: Lineage: test_hive_1508.creation_date EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1508.ds EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1508.ds_ts SIMPLE [(test_hive_1510)test_hive_1510.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1508.source_file_name SIMPLE [(test_hive_1510)test_hive_1510.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1508.test_hive_1497 EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1497, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1508.test_hive_1498 EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1498, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1508.test_hive_1499 EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1499, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1508.test_hive_1500 SIMPLE [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1500, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1508.test_hive_1501 EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1501, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1508.test_hive_1502 EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1502, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1508.test_hive_1503 SIMPLE [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1503, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1508.test_hive_1504 SIMPLE [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1504, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1508.test_hive_1505 SIMPLE [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1505, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1508.test_hive_1506 EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_1506, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1508.test_hive_418 EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:test_hive_418, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1508.ts EXPRESSION [(test_hive_1510)test_hive_1510.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1521 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1521 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1521
+(
+ test_hive_1515 string
+ ,test_hive_1513 string
+ ,test_hive_1516 string
+ ,test_hive_419 string
+ ,test_hive_1514 string
+ ,test_hive_1519 string
+ ,test_hive_1518 string
+ ,test_hive_1517 string
+ ,test_hive_1520 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1521
+POSTHOOK: query: create table test_hive_1521
+(
+ test_hive_1515 string
+ ,test_hive_1513 string
+ ,test_hive_1516 string
+ ,test_hive_419 string
+ ,test_hive_1514 string
+ ,test_hive_1519 string
+ ,test_hive_1518 string
+ ,test_hive_1517 string
+ ,test_hive_1520 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1521
+PREHOOK: query: create table if not exists test_hive_1524
+(
+ test_hive_1515 string
+ ,test_hive_1513 string
+ ,test_hive_1516 string
+ ,test_hive_419 string
+ ,test_hive_1514 string
+ ,test_hive_1519 string
+ ,test_hive_1518 string
+ ,test_hive_1517 string
+ ,test_hive_1520 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1524
+POSTHOOK: query: create table if not exists test_hive_1524
+(
+ test_hive_1515 string
+ ,test_hive_1513 string
+ ,test_hive_1516 string
+ ,test_hive_419 string
+ ,test_hive_1514 string
+ ,test_hive_1519 string
+ ,test_hive_1518 string
+ ,test_hive_1517 string
+ ,test_hive_1520 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1524
+PREHOOK: query: drop table if exists test_hive_1523 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1523 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1523
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1523
+POSTHOOK: query: create table if not exists test_hive_1523
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1523
+PREHOOK: query: drop view if exists test_hive_1526
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1526
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1526
+as
+select
+ cast(test_hive_1515 as int) as test_hive_1515
+ ,cast(test_hive_1513 as int) as test_hive_1513
+ ,cast(test_hive_1516 as int) as test_hive_1516
+ ,cast(test_hive_419 as string) as test_hive_419
+ ,cast(test_hive_1514 as string) as test_hive_1514
+ ,cast(test_hive_1519 as string) as test_hive_1519
+ ,cast(test_hive_1518 as string) as test_hive_1518
+ ,cast(test_hive_1517 as string) as test_hive_1517
+ ,cast(from_unixtime(unix_timestamp(test_hive_1520,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1520
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1524
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1524
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1526
+POSTHOOK: query: create view if not exists test_hive_1526
+as
+select
+ cast(test_hive_1515 as int) as test_hive_1515
+ ,cast(test_hive_1513 as int) as test_hive_1513
+ ,cast(test_hive_1516 as int) as test_hive_1516
+ ,cast(test_hive_419 as string) as test_hive_419
+ ,cast(test_hive_1514 as string) as test_hive_1514
+ ,cast(test_hive_1519 as string) as test_hive_1519
+ ,cast(test_hive_1518 as string) as test_hive_1518
+ ,cast(test_hive_1517 as string) as test_hive_1517
+ ,cast(from_unixtime(unix_timestamp(test_hive_1520,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1520
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1524
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1524
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1526
+POSTHOOK: Lineage: test_hive_1526.creation_date EXPRESSION [(test_hive_1524)test_hive_1524.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1526.ds EXPRESSION [(test_hive_1524)test_hive_1524.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1526.ds_ts SIMPLE [(test_hive_1524)test_hive_1524.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1526.source_file_name SIMPLE [(test_hive_1524)test_hive_1524.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1526.test_hive_1513 EXPRESSION [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1513, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1526.test_hive_1514 SIMPLE [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1514, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1526.test_hive_1515 EXPRESSION [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1515, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1526.test_hive_1516 EXPRESSION [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1516, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1526.test_hive_1517 SIMPLE [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1517, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1526.test_hive_1518 SIMPLE [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1518, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1526.test_hive_1519 SIMPLE [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1519, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1526.test_hive_1520 EXPRESSION [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1520, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1526.test_hive_419 SIMPLE [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_419, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1526.ts EXPRESSION [(test_hive_1524)test_hive_1524.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1525
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1525
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1525
+as
+select
+ test_hive_1515 as test_hive_1515
+ ,test_hive_1513 as test_hive_1513
+ ,test_hive_1516 as test_hive_1516
+ ,test_hive_419 as test_hive_419
+ ,test_hive_1514 as test_hive_1514
+ ,test_hive_1519 as test_hive_1519
+ ,test_hive_1518 as test_hive_1518
+ ,test_hive_1517 as test_hive_1517
+ ,test_hive_1520 as test_hive_1520
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1526 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1524
+PREHOOK: Input: default@test_hive_1526
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1525
+POSTHOOK: query: create view test_hive_1525
+as
+select
+ test_hive_1515 as test_hive_1515
+ ,test_hive_1513 as test_hive_1513
+ ,test_hive_1516 as test_hive_1516
+ ,test_hive_419 as test_hive_419
+ ,test_hive_1514 as test_hive_1514
+ ,test_hive_1519 as test_hive_1519
+ ,test_hive_1518 as test_hive_1518
+ ,test_hive_1517 as test_hive_1517
+ ,test_hive_1520 as test_hive_1520
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1526 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1524
+POSTHOOK: Input: default@test_hive_1526
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1525
+POSTHOOK: Lineage: test_hive_1525.creation_date EXPRESSION [(test_hive_1524)test_hive_1524.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1525.ds EXPRESSION [(test_hive_1524)test_hive_1524.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1525.ds_ts SIMPLE [(test_hive_1524)test_hive_1524.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1525.source_file_name SIMPLE [(test_hive_1524)test_hive_1524.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1525.test_hive_1513 EXPRESSION [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1513, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1525.test_hive_1514 SIMPLE [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1514, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1525.test_hive_1515 EXPRESSION [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1515, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1525.test_hive_1516 EXPRESSION [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1516, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1525.test_hive_1517 SIMPLE [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1517, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1525.test_hive_1518 SIMPLE [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1518, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1525.test_hive_1519 SIMPLE [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1519, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1525.test_hive_1520 EXPRESSION [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1520, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1525.test_hive_419 SIMPLE [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_419, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1525.ts EXPRESSION [(test_hive_1524)test_hive_1524.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1522
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1522
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1522
+as
+select t1.*
+from test_hive_1525 t1
+inner join test_hive_1523 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1523
+PREHOOK: Input: default@test_hive_1524
+PREHOOK: Input: default@test_hive_1525
+PREHOOK: Input: default@test_hive_1526
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1522
+POSTHOOK: query: create view test_hive_1522
+as
+select t1.*
+from test_hive_1525 t1
+inner join test_hive_1523 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1523
+POSTHOOK: Input: default@test_hive_1524
+POSTHOOK: Input: default@test_hive_1525
+POSTHOOK: Input: default@test_hive_1526
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1522
+POSTHOOK: Lineage: test_hive_1522.creation_date EXPRESSION [(test_hive_1524)test_hive_1524.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1522.ds EXPRESSION [(test_hive_1524)test_hive_1524.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1522.ds_ts SIMPLE [(test_hive_1524)test_hive_1524.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1522.source_file_name SIMPLE [(test_hive_1524)test_hive_1524.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1522.test_hive_1513 EXPRESSION [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1513, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1522.test_hive_1514 SIMPLE [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1514, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1522.test_hive_1515 EXPRESSION [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1515, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1522.test_hive_1516 EXPRESSION [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1516, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1522.test_hive_1517 SIMPLE [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1517, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1522.test_hive_1518 SIMPLE [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1518, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1522.test_hive_1519 SIMPLE [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1519, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1522.test_hive_1520 EXPRESSION [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_1520, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1522.test_hive_419 SIMPLE [(test_hive_1524)test_hive_1524.FieldSchema(name:test_hive_419, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1522.ts EXPRESSION [(test_hive_1524)test_hive_1524.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1537 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1537 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1537
+(
+ test_hive_1531 string
+ ,test_hive_1527 string
+ ,test_hive_1532 string
+ ,test_hive_420 string
+ ,test_hive_1530 string
+ ,test_hive_1529 string
+ ,test_hive_1528 string
+ ,test_hive_1535 string
+ ,test_hive_1534 string
+ ,test_hive_1533 string
+ ,test_hive_1536 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1537
+POSTHOOK: query: create table test_hive_1537
+(
+ test_hive_1531 string
+ ,test_hive_1527 string
+ ,test_hive_1532 string
+ ,test_hive_420 string
+ ,test_hive_1530 string
+ ,test_hive_1529 string
+ ,test_hive_1528 string
+ ,test_hive_1535 string
+ ,test_hive_1534 string
+ ,test_hive_1533 string
+ ,test_hive_1536 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1537
+PREHOOK: query: create table if not exists test_hive_1540
+(
+ test_hive_1531 string
+ ,test_hive_1527 string
+ ,test_hive_1532 string
+ ,test_hive_420 string
+ ,test_hive_1530 string
+ ,test_hive_1529 string
+ ,test_hive_1528 string
+ ,test_hive_1535 string
+ ,test_hive_1534 string
+ ,test_hive_1533 string
+ ,test_hive_1536 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1540
+POSTHOOK: query: create table if not exists test_hive_1540
+(
+ test_hive_1531 string
+ ,test_hive_1527 string
+ ,test_hive_1532 string
+ ,test_hive_420 string
+ ,test_hive_1530 string
+ ,test_hive_1529 string
+ ,test_hive_1528 string
+ ,test_hive_1535 string
+ ,test_hive_1534 string
+ ,test_hive_1533 string
+ ,test_hive_1536 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1540
+PREHOOK: query: drop table if exists test_hive_1539 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1539 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1539
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1539
+POSTHOOK: query: create table if not exists test_hive_1539
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1539
+PREHOOK: query: drop view if exists test_hive_1542
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1542
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1542
+as
+select
+ cast(test_hive_1531 as int) as test_hive_1531
+ ,cast(test_hive_1527 as int) as test_hive_1527
+ ,cast(test_hive_1532 as int) as test_hive_1532
+ ,cast(test_hive_420 as decimal) as test_hive_420
+ ,cast(test_hive_1530 as string) as test_hive_1530
+ ,cast(from_unixtime(unix_timestamp(test_hive_1529,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1529
+ ,cast(from_unixtime(unix_timestamp(test_hive_1528,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1528
+ ,cast(test_hive_1535 as string) as test_hive_1535
+ ,cast(test_hive_1534 as string) as test_hive_1534
+ ,cast(test_hive_1533 as string) as test_hive_1533
+ ,cast(from_unixtime(unix_timestamp(test_hive_1536,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1536
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1540
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1540
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1542
+POSTHOOK: query: create view if not exists test_hive_1542
+as
+select
+ cast(test_hive_1531 as int) as test_hive_1531
+ ,cast(test_hive_1527 as int) as test_hive_1527
+ ,cast(test_hive_1532 as int) as test_hive_1532
+ ,cast(test_hive_420 as decimal) as test_hive_420
+ ,cast(test_hive_1530 as string) as test_hive_1530
+ ,cast(from_unixtime(unix_timestamp(test_hive_1529,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1529
+ ,cast(from_unixtime(unix_timestamp(test_hive_1528,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1528
+ ,cast(test_hive_1535 as string) as test_hive_1535
+ ,cast(test_hive_1534 as string) as test_hive_1534
+ ,cast(test_hive_1533 as string) as test_hive_1533
+ ,cast(from_unixtime(unix_timestamp(test_hive_1536,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1536
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1540
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1540
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1542
+POSTHOOK: Lineage: test_hive_1542.creation_date EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1542.ds EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1542.ds_ts SIMPLE [(test_hive_1540)test_hive_1540.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1542.source_file_name SIMPLE [(test_hive_1540)test_hive_1540.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1542.test_hive_1527 EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1527, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1542.test_hive_1528 EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1528, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1542.test_hive_1529 EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1529, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1542.test_hive_1530 SIMPLE [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1530, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1542.test_hive_1531 EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1531, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1542.test_hive_1532 EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1532, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1542.test_hive_1533 SIMPLE [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1533, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1542.test_hive_1534 SIMPLE [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1534, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1542.test_hive_1535 SIMPLE [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1535, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1542.test_hive_1536 EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1536, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1542.test_hive_420 EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_420, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1542.ts EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1541
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1541
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1541
+as
+select
+ test_hive_1531 as test_hive_1531
+ ,test_hive_1527 as test_hive_1527
+ ,test_hive_1532 as test_hive_1532
+ ,test_hive_420 as test_hive_420
+ ,test_hive_1530 as test_hive_1530
+ ,test_hive_1529 as test_hive_1529
+ ,test_hive_1528 as test_hive_1528
+ ,test_hive_1535 as test_hive_1535
+ ,test_hive_1534 as test_hive_1534
+ ,test_hive_1533 as test_hive_1533
+ ,test_hive_1536 as test_hive_1536
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1542 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1540
+PREHOOK: Input: default@test_hive_1542
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1541
+POSTHOOK: query: create view test_hive_1541
+as
+select
+ test_hive_1531 as test_hive_1531
+ ,test_hive_1527 as test_hive_1527
+ ,test_hive_1532 as test_hive_1532
+ ,test_hive_420 as test_hive_420
+ ,test_hive_1530 as test_hive_1530
+ ,test_hive_1529 as test_hive_1529
+ ,test_hive_1528 as test_hive_1528
+ ,test_hive_1535 as test_hive_1535
+ ,test_hive_1534 as test_hive_1534
+ ,test_hive_1533 as test_hive_1533
+ ,test_hive_1536 as test_hive_1536
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1542 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1540
+POSTHOOK: Input: default@test_hive_1542
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1541
+POSTHOOK: Lineage: test_hive_1541.creation_date EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1541.ds EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1541.ds_ts SIMPLE [(test_hive_1540)test_hive_1540.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1541.source_file_name SIMPLE [(test_hive_1540)test_hive_1540.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1541.test_hive_1527 EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1527, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1541.test_hive_1528 EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1528, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1541.test_hive_1529 EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1529, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1541.test_hive_1530 SIMPLE [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1530, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1541.test_hive_1531 EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1531, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1541.test_hive_1532 EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1532, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1541.test_hive_1533 SIMPLE [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1533, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1541.test_hive_1534 SIMPLE [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1534, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1541.test_hive_1535 SIMPLE [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1535, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1541.test_hive_1536 EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1536, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1541.test_hive_420 EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_420, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1541.ts EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1538
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1538
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1538
+as
+select t1.*
+from test_hive_1541 t1
+inner join test_hive_1539 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1539
+PREHOOK: Input: default@test_hive_1540
+PREHOOK: Input: default@test_hive_1541
+PREHOOK: Input: default@test_hive_1542
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1538
+POSTHOOK: query: create view test_hive_1538
+as
+select t1.*
+from test_hive_1541 t1
+inner join test_hive_1539 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1539
+POSTHOOK: Input: default@test_hive_1540
+POSTHOOK: Input: default@test_hive_1541
+POSTHOOK: Input: default@test_hive_1542
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1538
+POSTHOOK: Lineage: test_hive_1538.creation_date EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1538.ds EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1538.ds_ts SIMPLE [(test_hive_1540)test_hive_1540.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1538.source_file_name SIMPLE [(test_hive_1540)test_hive_1540.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1538.test_hive_1527 EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1527, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1538.test_hive_1528 EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1528, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1538.test_hive_1529 EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1529, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1538.test_hive_1530 SIMPLE [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1530, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1538.test_hive_1531 EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1531, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1538.test_hive_1532 EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1532, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1538.test_hive_1533 SIMPLE [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1533, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1538.test_hive_1534 SIMPLE [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1534, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1538.test_hive_1535 SIMPLE [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1535, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1538.test_hive_1536 EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_1536, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1538.test_hive_420 EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:test_hive_420, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1538.ts EXPRESSION [(test_hive_1540)test_hive_1540.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1849 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1849 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1849
+(
+ test_hive_1845 string
+ ,test_hive_1843 string
+ ,test_hive_1846 string
+ ,test_hive_445 string
+ ,test_hive_1844 string
+ ,test_hive_1847 string
+ ,test_hive_1848 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1849
+POSTHOOK: query: create table test_hive_1849
+(
+ test_hive_1845 string
+ ,test_hive_1843 string
+ ,test_hive_1846 string
+ ,test_hive_445 string
+ ,test_hive_1844 string
+ ,test_hive_1847 string
+ ,test_hive_1848 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1849
+PREHOOK: query: create table if not exists test_hive_1852
+(
+ test_hive_1845 string
+ ,test_hive_1843 string
+ ,test_hive_1846 string
+ ,test_hive_445 string
+ ,test_hive_1844 string
+ ,test_hive_1847 string
+ ,test_hive_1848 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1852
+POSTHOOK: query: create table if not exists test_hive_1852
+(
+ test_hive_1845 string
+ ,test_hive_1843 string
+ ,test_hive_1846 string
+ ,test_hive_445 string
+ ,test_hive_1844 string
+ ,test_hive_1847 string
+ ,test_hive_1848 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1852 +PREHOOK: query: drop table if exists test_hive_1851 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1851 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1851 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1851 +POSTHOOK: query: create table if not exists test_hive_1851 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1851 +PREHOOK: query: drop view if exists test_hive_1854 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1854 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1854 +as +select + cast(test_hive_1845 as int) as test_hive_1845 + ,cast(test_hive_1843 as int) as test_hive_1843 + ,cast(test_hive_1846 as int) as test_hive_1846 + ,cast(test_hive_445 as decimal) as test_hive_445 + ,cast(test_hive_1844 as string) as test_hive_1844 + ,cast(test_hive_1847 as string) as test_hive_1847 + ,cast(from_unixtime(unix_timestamp(test_hive_1848,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1848 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1852 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1852 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1854 +POSTHOOK: query: create view if not exists test_hive_1854 +as +select + cast(test_hive_1845 as int) as test_hive_1845 + ,cast(test_hive_1843 as int) as test_hive_1843 + ,cast(test_hive_1846 as int) as test_hive_1846 + ,cast(test_hive_445 as decimal) as test_hive_445 + ,cast(test_hive_1844 as string) as test_hive_1844 + ,cast(test_hive_1847 as string) as test_hive_1847 + ,cast(from_unixtime(unix_timestamp(test_hive_1848,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1848 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1852 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1852 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1854 +POSTHOOK: Lineage: test_hive_1854.creation_date EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1854.ds EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1854.ds_ts SIMPLE [(test_hive_1852)test_hive_1852.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1854.source_file_name SIMPLE [(test_hive_1852)test_hive_1852.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1854.test_hive_1843 EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:test_hive_1843, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1854.test_hive_1844 SIMPLE [(test_hive_1852)test_hive_1852.FieldSchema(name:test_hive_1844, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1854.test_hive_1845 EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:test_hive_1845, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1854.test_hive_1846 EXPRESSION 
[(test_hive_1852)test_hive_1852.FieldSchema(name:test_hive_1846, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1854.test_hive_1847 SIMPLE [(test_hive_1852)test_hive_1852.FieldSchema(name:test_hive_1847, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1854.test_hive_1848 EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:test_hive_1848, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1854.test_hive_445 EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:test_hive_445, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1854.ts EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1853 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1853 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1853 +as +select + test_hive_1845 as test_hive_1845 + ,test_hive_1843 as test_hive_1843 + ,test_hive_1846 as test_hive_1846 + ,test_hive_445 as test_hive_445 + ,test_hive_1844 as test_hive_1844 + ,test_hive_1847 as test_hive_1847 + ,test_hive_1848 as test_hive_1848 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1854 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1852 +PREHOOK: Input: default@test_hive_1854 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1853 +POSTHOOK: query: create view test_hive_1853 +as +select + test_hive_1845 as test_hive_1845 + ,test_hive_1843 as test_hive_1843 + ,test_hive_1846 as test_hive_1846 + ,test_hive_445 as test_hive_445 + ,test_hive_1844 as test_hive_1844 + ,test_hive_1847 as test_hive_1847 + ,test_hive_1848 as test_hive_1848 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1854 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1852 +POSTHOOK: Input: default@test_hive_1854 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1853 +POSTHOOK: Lineage: test_hive_1853.creation_date EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1853.ds EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1853.ds_ts SIMPLE [(test_hive_1852)test_hive_1852.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1853.source_file_name SIMPLE [(test_hive_1852)test_hive_1852.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1853.test_hive_1843 EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:test_hive_1843, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1853.test_hive_1844 SIMPLE [(test_hive_1852)test_hive_1852.FieldSchema(name:test_hive_1844, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1853.test_hive_1845 EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:test_hive_1845, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1853.test_hive_1846 EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:test_hive_1846, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1853.test_hive_1847 SIMPLE [(test_hive_1852)test_hive_1852.FieldSchema(name:test_hive_1847, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1853.test_hive_1848 EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:test_hive_1848, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1853.test_hive_445 EXPRESSION 
[(test_hive_1852)test_hive_1852.FieldSchema(name:test_hive_445, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1853.ts EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1850 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1850 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1850 +as +select t1.* +from test_hive_1853 t1 +inner join test_hive_1851 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1851 +PREHOOK: Input: default@test_hive_1852 +PREHOOK: Input: default@test_hive_1853 +PREHOOK: Input: default@test_hive_1854 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1850 +POSTHOOK: query: create view test_hive_1850 +as +select t1.* +from test_hive_1853 t1 +inner join test_hive_1851 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1851 +POSTHOOK: Input: default@test_hive_1852 +POSTHOOK: Input: default@test_hive_1853 +POSTHOOK: Input: default@test_hive_1854 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1850 +POSTHOOK: Lineage: test_hive_1850.creation_date EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1850.ds EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1850.ds_ts SIMPLE [(test_hive_1852)test_hive_1852.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1850.source_file_name SIMPLE [(test_hive_1852)test_hive_1852.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1850.test_hive_1843 EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:test_hive_1843, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1850.test_hive_1844 SIMPLE [(test_hive_1852)test_hive_1852.FieldSchema(name:test_hive_1844, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1850.test_hive_1845 EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:test_hive_1845, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1850.test_hive_1846 EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:test_hive_1846, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1850.test_hive_1847 SIMPLE [(test_hive_1852)test_hive_1852.FieldSchema(name:test_hive_1847, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1850.test_hive_1848 EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:test_hive_1848, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1850.test_hive_445 EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:test_hive_445, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1850.ts EXPRESSION [(test_hive_1852)test_hive_1852.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1861 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1861 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1861 +( + test_hive_1857 string + ,test_hive_1855 string + ,test_hive_1858 string + ,test_hive_446 string + ,test_hive_1856 string + ,test_hive_1859 string + ,test_hive_1860 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: 
Output: database:default +PREHOOK: Output: default@test_hive_1861 +POSTHOOK: query: create table test_hive_1861 +( + test_hive_1857 string + ,test_hive_1855 string + ,test_hive_1858 string + ,test_hive_446 string + ,test_hive_1856 string + ,test_hive_1859 string + ,test_hive_1860 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1861 +PREHOOK: query: create table if not exists test_hive_1864 +( + test_hive_1857 string + ,test_hive_1855 string + ,test_hive_1858 string + ,test_hive_446 string + ,test_hive_1856 string + ,test_hive_1859 string + ,test_hive_1860 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1864 +POSTHOOK: query: create table if not exists test_hive_1864 +( + test_hive_1857 string + ,test_hive_1855 string + ,test_hive_1858 string + ,test_hive_446 string + ,test_hive_1856 string + ,test_hive_1859 string + ,test_hive_1860 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1864 +PREHOOK: query: drop table if exists test_hive_1863 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1863 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1863 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1863 +POSTHOOK: query: create table if not exists test_hive_1863 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1863 +PREHOOK: query: drop view if exists test_hive_1866 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1866 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1866 +as +select + cast(test_hive_1857 as int) as test_hive_1857 + ,cast(test_hive_1855 as int) as test_hive_1855 + ,cast(test_hive_1858 as int) as test_hive_1858 + ,cast(test_hive_446 as string) as test_hive_446 + ,cast(test_hive_1856 as string) as test_hive_1856 + ,cast(test_hive_1859 as string) as test_hive_1859 + ,cast(from_unixtime(unix_timestamp(test_hive_1860,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1860 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1864 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1864 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1866 +POSTHOOK: query: create view if not exists test_hive_1866 +as +select + cast(test_hive_1857 as int) as test_hive_1857 + ,cast(test_hive_1855 as int) as test_hive_1855 + ,cast(test_hive_1858 as int) as test_hive_1858 + ,cast(test_hive_446 as string) as test_hive_446 + ,cast(test_hive_1856 as string) as test_hive_1856 + ,cast(test_hive_1859 as string) as test_hive_1859 + ,cast(from_unixtime(unix_timestamp(test_hive_1860,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1860 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + 
,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1864 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1864 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1866 +POSTHOOK: Lineage: test_hive_1866.creation_date EXPRESSION [(test_hive_1864)test_hive_1864.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1866.ds EXPRESSION [(test_hive_1864)test_hive_1864.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1866.ds_ts SIMPLE [(test_hive_1864)test_hive_1864.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1866.source_file_name SIMPLE [(test_hive_1864)test_hive_1864.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1866.test_hive_1855 EXPRESSION [(test_hive_1864)test_hive_1864.FieldSchema(name:test_hive_1855, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1866.test_hive_1856 SIMPLE [(test_hive_1864)test_hive_1864.FieldSchema(name:test_hive_1856, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1866.test_hive_1857 EXPRESSION [(test_hive_1864)test_hive_1864.FieldSchema(name:test_hive_1857, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1866.test_hive_1858 EXPRESSION [(test_hive_1864)test_hive_1864.FieldSchema(name:test_hive_1858, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1866.test_hive_1859 SIMPLE [(test_hive_1864)test_hive_1864.FieldSchema(name:test_hive_1859, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1866.test_hive_1860 EXPRESSION [(test_hive_1864)test_hive_1864.FieldSchema(name:test_hive_1860, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1866.test_hive_446 SIMPLE [(test_hive_1864)test_hive_1864.FieldSchema(name:test_hive_446, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1866.ts EXPRESSION [(test_hive_1864)test_hive_1864.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1865 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1865 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1865 +as +select + test_hive_1857 as test_hive_1857 + ,test_hive_1855 as test_hive_1855 + ,test_hive_1858 as test_hive_1858 + ,test_hive_446 as test_hive_446 + ,test_hive_1856 as test_hive_1856 + ,test_hive_1859 as test_hive_1859 + ,test_hive_1860 as test_hive_1860 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1866 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1864 +PREHOOK: Input: default@test_hive_1866 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1865 +POSTHOOK: query: create view test_hive_1865 +as +select + test_hive_1857 as test_hive_1857 + ,test_hive_1855 as test_hive_1855 + ,test_hive_1858 as test_hive_1858 + ,test_hive_446 as test_hive_446 + ,test_hive_1856 as test_hive_1856 + ,test_hive_1859 as test_hive_1859 + ,test_hive_1860 as test_hive_1860 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1866 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1864 +POSTHOOK: Input: default@test_hive_1866 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1865 +POSTHOOK: Lineage: test_hive_1865.creation_date EXPRESSION [(test_hive_1864)test_hive_1864.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1865.ds EXPRESSION 
[(test_hive_1864)test_hive_1864.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1865.ds_ts SIMPLE [(test_hive_1864)test_hive_1864.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1865.source_file_name SIMPLE [(test_hive_1864)test_hive_1864.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1865.test_hive_1855 EXPRESSION [(test_hive_1864)test_hive_1864.FieldSchema(name:test_hive_1855, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1865.test_hive_1856 SIMPLE [(test_hive_1864)test_hive_1864.FieldSchema(name:test_hive_1856, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1865.test_hive_1857 EXPRESSION [(test_hive_1864)test_hive_1864.FieldSchema(name:test_hive_1857, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1865.test_hive_1858 EXPRESSION [(test_hive_1864)test_hive_1864.FieldSchema(name:test_hive_1858, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1865.test_hive_1859 SIMPLE [(test_hive_1864)test_hive_1864.FieldSchema(name:test_hive_1859, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1865.test_hive_1860 EXPRESSION [(test_hive_1864)test_hive_1864.FieldSchema(name:test_hive_1860, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1865.test_hive_446 SIMPLE [(test_hive_1864)test_hive_1864.FieldSchema(name:test_hive_446, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1865.ts EXPRESSION [(test_hive_1864)test_hive_1864.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1862 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1862 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1862 +as +select t1.* +from test_hive_1865 t1 +inner join test_hive_1863 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1863 +PREHOOK: Input: default@test_hive_1864 +PREHOOK: Input: default@test_hive_1865 +PREHOOK: Input: default@test_hive_1866 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1862 +POSTHOOK: query: create view test_hive_1862 +as +select t1.* +from test_hive_1865 t1 +inner join test_hive_1863 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1863 +POSTHOOK: Input: default@test_hive_1864 +POSTHOOK: Input: default@test_hive_1865 +POSTHOOK: Input: default@test_hive_1866 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1862 +POSTHOOK: Lineage: test_hive_1862.creation_date EXPRESSION [(test_hive_1864)test_hive_1864.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1862.ds EXPRESSION [(test_hive_1864)test_hive_1864.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1862.ds_ts SIMPLE [(test_hive_1864)test_hive_1864.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1862.source_file_name SIMPLE [(test_hive_1864)test_hive_1864.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1862.test_hive_1855 EXPRESSION [(test_hive_1864)test_hive_1864.FieldSchema(name:test_hive_1855, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1862.test_hive_1856 SIMPLE [(test_hive_1864)test_hive_1864.FieldSchema(name:test_hive_1856, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1862.test_hive_1857 EXPRESSION 
[(test_hive_1864)test_hive_1864.FieldSchema(name:test_hive_1857, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1862.test_hive_1858 EXPRESSION [(test_hive_1864)test_hive_1864.FieldSchema(name:test_hive_1858, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1862.test_hive_1859 SIMPLE [(test_hive_1864)test_hive_1864.FieldSchema(name:test_hive_1859, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1862.test_hive_1860 EXPRESSION [(test_hive_1864)test_hive_1864.FieldSchema(name:test_hive_1860, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1862.test_hive_446 SIMPLE [(test_hive_1864)test_hive_1864.FieldSchema(name:test_hive_446, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1862.ts EXPRESSION [(test_hive_1864)test_hive_1864.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1873 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1873 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1873 +( + test_hive_1869 string + ,test_hive_1867 string + ,test_hive_1870 string + ,test_hive_447 string + ,test_hive_1868 string + ,test_hive_1871 string + ,test_hive_1872 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1873 +POSTHOOK: query: create table test_hive_1873 +( + test_hive_1869 string + ,test_hive_1867 string + ,test_hive_1870 string + ,test_hive_447 string + ,test_hive_1868 string + ,test_hive_1871 string + ,test_hive_1872 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1873 +PREHOOK: query: create table if not exists test_hive_1876 +( + test_hive_1869 string + ,test_hive_1867 string + ,test_hive_1870 string + ,test_hive_447 string + ,test_hive_1868 string + ,test_hive_1871 string + ,test_hive_1872 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1876 +POSTHOOK: query: create table if not exists test_hive_1876 +( + test_hive_1869 string + ,test_hive_1867 string + ,test_hive_1870 string + ,test_hive_447 string + ,test_hive_1868 string + ,test_hive_1871 string + ,test_hive_1872 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1876 +PREHOOK: query: drop table if exists test_hive_1875 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1875 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1875 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1875 +POSTHOOK: query: create table if not exists test_hive_1875 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1875 +PREHOOK: query: drop view if exists test_hive_1878 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1878 
+POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1878 +as +select + cast(test_hive_1869 as int) as test_hive_1869 + ,cast(test_hive_1867 as int) as test_hive_1867 + ,cast(test_hive_1870 as int) as test_hive_1870 + ,cast(test_hive_447 as string) as test_hive_447 + ,cast(test_hive_1868 as string) as test_hive_1868 + ,cast(test_hive_1871 as string) as test_hive_1871 + ,cast(from_unixtime(unix_timestamp(test_hive_1872,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1872 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1876 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1876 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1878 +POSTHOOK: query: create view if not exists test_hive_1878 +as +select + cast(test_hive_1869 as int) as test_hive_1869 + ,cast(test_hive_1867 as int) as test_hive_1867 + ,cast(test_hive_1870 as int) as test_hive_1870 + ,cast(test_hive_447 as string) as test_hive_447 + ,cast(test_hive_1868 as string) as test_hive_1868 + ,cast(test_hive_1871 as string) as test_hive_1871 + ,cast(from_unixtime(unix_timestamp(test_hive_1872,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1872 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1876 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1876 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1878 +POSTHOOK: Lineage: test_hive_1878.creation_date EXPRESSION [(test_hive_1876)test_hive_1876.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1878.ds EXPRESSION [(test_hive_1876)test_hive_1876.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1878.ds_ts SIMPLE [(test_hive_1876)test_hive_1876.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1878.source_file_name SIMPLE [(test_hive_1876)test_hive_1876.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1878.test_hive_1867 EXPRESSION [(test_hive_1876)test_hive_1876.FieldSchema(name:test_hive_1867, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1878.test_hive_1868 SIMPLE [(test_hive_1876)test_hive_1876.FieldSchema(name:test_hive_1868, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1878.test_hive_1869 EXPRESSION [(test_hive_1876)test_hive_1876.FieldSchema(name:test_hive_1869, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1878.test_hive_1870 EXPRESSION [(test_hive_1876)test_hive_1876.FieldSchema(name:test_hive_1870, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1878.test_hive_1871 SIMPLE [(test_hive_1876)test_hive_1876.FieldSchema(name:test_hive_1871, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1878.test_hive_1872 EXPRESSION [(test_hive_1876)test_hive_1876.FieldSchema(name:test_hive_1872, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1878.test_hive_447 SIMPLE [(test_hive_1876)test_hive_1876.FieldSchema(name:test_hive_447, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1878.ts EXPRESSION [(test_hive_1876)test_hive_1876.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1877 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1877 +POSTHOOK: type: 
DROPVIEW +PREHOOK: query: create view test_hive_1877 +as +select + test_hive_1869 as test_hive_1869 + ,test_hive_1867 as test_hive_1867 + ,test_hive_1870 as test_hive_1870 + ,test_hive_447 as test_hive_447 + ,test_hive_1868 as test_hive_1868 + ,test_hive_1871 as test_hive_1871 + ,test_hive_1872 as test_hive_1872 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1878 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1876 +PREHOOK: Input: default@test_hive_1878 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1877 +POSTHOOK: query: create view test_hive_1877 +as +select + test_hive_1869 as test_hive_1869 + ,test_hive_1867 as test_hive_1867 + ,test_hive_1870 as test_hive_1870 + ,test_hive_447 as test_hive_447 + ,test_hive_1868 as test_hive_1868 + ,test_hive_1871 as test_hive_1871 + ,test_hive_1872 as test_hive_1872 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1878 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1876 +POSTHOOK: Input: default@test_hive_1878 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1877 +POSTHOOK: Lineage: test_hive_1877.creation_date EXPRESSION [(test_hive_1876)test_hive_1876.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1877.ds EXPRESSION [(test_hive_1876)test_hive_1876.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1877.ds_ts SIMPLE [(test_hive_1876)test_hive_1876.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1877.source_file_name SIMPLE [(test_hive_1876)test_hive_1876.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1877.test_hive_1867 EXPRESSION [(test_hive_1876)test_hive_1876.FieldSchema(name:test_hive_1867, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1877.test_hive_1868 SIMPLE [(test_hive_1876)test_hive_1876.FieldSchema(name:test_hive_1868, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1877.test_hive_1869 EXPRESSION [(test_hive_1876)test_hive_1876.FieldSchema(name:test_hive_1869, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1877.test_hive_1870 EXPRESSION [(test_hive_1876)test_hive_1876.FieldSchema(name:test_hive_1870, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1877.test_hive_1871 SIMPLE [(test_hive_1876)test_hive_1876.FieldSchema(name:test_hive_1871, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1877.test_hive_1872 EXPRESSION [(test_hive_1876)test_hive_1876.FieldSchema(name:test_hive_1872, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1877.test_hive_447 SIMPLE [(test_hive_1876)test_hive_1876.FieldSchema(name:test_hive_447, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1877.ts EXPRESSION [(test_hive_1876)test_hive_1876.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1874 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1874 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1874 +as +select t1.* +from test_hive_1877 t1 +inner join test_hive_1875 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1875 +PREHOOK: Input: default@test_hive_1876 +PREHOOK: Input: default@test_hive_1877 +PREHOOK: Input: default@test_hive_1878 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1874 +POSTHOOK: query: create view test_hive_1874 +as 
+select t1.* +from test_hive_1877 t1 +inner join test_hive_1875 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1875 +POSTHOOK: Input: default@test_hive_1876 +POSTHOOK: Input: default@test_hive_1877 +POSTHOOK: Input: default@test_hive_1878 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1874 +POSTHOOK: Lineage: test_hive_1874.creation_date EXPRESSION [(test_hive_1876)test_hive_1876.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1874.ds EXPRESSION [(test_hive_1876)test_hive_1876.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1874.ds_ts SIMPLE [(test_hive_1876)test_hive_1876.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1874.source_file_name SIMPLE [(test_hive_1876)test_hive_1876.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1874.test_hive_1867 EXPRESSION [(test_hive_1876)test_hive_1876.FieldSchema(name:test_hive_1867, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1874.test_hive_1868 SIMPLE [(test_hive_1876)test_hive_1876.FieldSchema(name:test_hive_1868, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1874.test_hive_1869 EXPRESSION [(test_hive_1876)test_hive_1876.FieldSchema(name:test_hive_1869, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1874.test_hive_1870 EXPRESSION [(test_hive_1876)test_hive_1876.FieldSchema(name:test_hive_1870, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1874.test_hive_1871 SIMPLE [(test_hive_1876)test_hive_1876.FieldSchema(name:test_hive_1871, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1874.test_hive_1872 EXPRESSION [(test_hive_1876)test_hive_1876.FieldSchema(name:test_hive_1872, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1874.test_hive_447 SIMPLE [(test_hive_1876)test_hive_1876.FieldSchema(name:test_hive_447, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1874.ts EXPRESSION [(test_hive_1876)test_hive_1876.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1299 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1299 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1299 +( + test_hive_1288 string + ,test_hive_1287 string + ,test_hive_1289 string + ,test_hive_1282 string + ,test_hive_1285 string + ,test_hive_1283 string + ,test_hive_12832 string + ,test_hive_1286 string + ,test_hive_328 string + ,test_hive_316 string + ,test_hive_322 string + ,test_hive_327 string + ,test_hive_325 string + ,test_hive_313 string + ,test_hive_320 string + ,test_hive_318 string + ,test_hive_319 string + ,test_hive_331 string + ,test_hive_332 string + ,test_hive_333 string + ,test_hive_314 string + ,test_hive_321 string + ,test_hive_315 string + ,test_hive_324 string + ,test_hive_323 string + ,test_hive_326 string + ,test_hive_310 string + ,test_hive_311 string + ,test_hive_312 string + ,test_hive_317 string + ,test_hive_329 string + ,test_hive_330 string + ,test_hive_309 string + ,test_hive_1290 string + ,test_hive_1290_lag string + ,test_hive_1290_mil string + ,test_hive_1290_lag_mil string + ,test_hive_1290_bp string + ,test_hive_1290_bp_lag string + ,test_hive_1290_con string + ,test_hive_1290_con_lag string + ,test_hive_1298 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' 
+tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1299 +POSTHOOK: query: create table test_hive_1299 +( + test_hive_1288 string + ,test_hive_1287 string + ,test_hive_1289 string + ,test_hive_1282 string + ,test_hive_1285 string + ,test_hive_1283 string + ,test_hive_12832 string + ,test_hive_1286 string + ,test_hive_328 string + ,test_hive_316 string + ,test_hive_322 string + ,test_hive_327 string + ,test_hive_325 string + ,test_hive_313 string + ,test_hive_320 string + ,test_hive_318 string + ,test_hive_319 string + ,test_hive_331 string + ,test_hive_332 string + ,test_hive_333 string + ,test_hive_314 string + ,test_hive_321 string + ,test_hive_315 string + ,test_hive_324 string + ,test_hive_323 string + ,test_hive_326 string + ,test_hive_310 string + ,test_hive_311 string + ,test_hive_312 string + ,test_hive_317 string + ,test_hive_329 string + ,test_hive_330 string + ,test_hive_309 string + ,test_hive_1290 string + ,test_hive_1290_lag string + ,test_hive_1290_mil string + ,test_hive_1290_lag_mil string + ,test_hive_1290_bp string + ,test_hive_1290_bp_lag string + ,test_hive_1290_con string + ,test_hive_1290_con_lag string + ,test_hive_1298 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1299 +PREHOOK: query: create table if not exists test_hive_1302 +( + test_hive_1288 string + ,test_hive_1287 string + ,test_hive_1289 string + ,test_hive_1282 string + ,test_hive_1285 string + ,test_hive_1283 string + ,test_hive_12832 string + ,test_hive_1286 string + ,test_hive_328 string + ,test_hive_316 string + ,test_hive_322 string + ,test_hive_327 string + ,test_hive_325 string + ,test_hive_313 string + ,test_hive_320 string + ,test_hive_318 string + ,test_hive_319 string + ,test_hive_331 string + ,test_hive_332 string + ,test_hive_333 string + ,test_hive_314 string + ,test_hive_321 string + ,test_hive_315 string + ,test_hive_324 string + ,test_hive_323 string + ,test_hive_326 string + ,test_hive_310 string + ,test_hive_311 string + ,test_hive_312 string + ,test_hive_317 string + ,test_hive_329 string + ,test_hive_330 string + ,test_hive_309 string + ,test_hive_1290 string + ,test_hive_1290_lag string + ,test_hive_1290_mil string + ,test_hive_1290_lag_mil string + ,test_hive_1290_bp string + ,test_hive_1290_bp_lag string + ,test_hive_1290_con string + ,test_hive_1290_con_lag string + ,test_hive_1298 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1302 +POSTHOOK: query: create table if not exists test_hive_1302 +( + test_hive_1288 string + ,test_hive_1287 string + ,test_hive_1289 string + ,test_hive_1282 string + ,test_hive_1285 string + ,test_hive_1283 string + ,test_hive_12832 string + ,test_hive_1286 string + ,test_hive_328 string + ,test_hive_316 string + ,test_hive_322 string + ,test_hive_327 string + ,test_hive_325 string + ,test_hive_313 string + ,test_hive_320 string + ,test_hive_318 string + ,test_hive_319 string + ,test_hive_331 string + ,test_hive_332 string + ,test_hive_333 string + ,test_hive_314 string + ,test_hive_321 string + ,test_hive_315 string + ,test_hive_324 string + ,test_hive_323 string + ,test_hive_326 string + 
,test_hive_310 string + ,test_hive_311 string + ,test_hive_312 string + ,test_hive_317 string + ,test_hive_329 string + ,test_hive_330 string + ,test_hive_309 string + ,test_hive_1290 string + ,test_hive_1290_lag string + ,test_hive_1290_mil string + ,test_hive_1290_lag_mil string + ,test_hive_1290_bp string + ,test_hive_1290_bp_lag string + ,test_hive_1290_con string + ,test_hive_1290_con_lag string + ,test_hive_1298 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1302 +PREHOOK: query: drop table if exists test_hive_1301 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1301 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1301 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1301 +POSTHOOK: query: create table if not exists test_hive_1301 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1301 +PREHOOK: query: drop view if exists test_hive_1304 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1304 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1304 +as +select + cast(test_hive_1288 as int) as test_hive_1288 + ,cast(test_hive_1287 as int) as test_hive_1287 + ,cast(test_hive_1289 as int) as test_hive_1289 + ,cast(test_hive_1282 as string) as test_hive_1282 + ,cast(test_hive_1285 as string) as test_hive_1285 + ,cast(test_hive_1283 as string) as test_hive_1283 + ,cast(test_hive_12832 as string) as test_hive_12832 + ,cast(test_hive_1286 as string) as test_hive_1286 + ,cast(test_hive_328 as string) as test_hive_328 + ,cast(test_hive_316 as string) as test_hive_316 + ,cast(test_hive_322 as string) as test_hive_322 + ,cast(test_hive_327 as string) as test_hive_327 + ,cast(test_hive_325 as string) as test_hive_325 + ,cast(test_hive_313 as string) as test_hive_313 + ,cast(test_hive_320 as string) as test_hive_320 + ,cast(test_hive_318 as string) as test_hive_318 + ,cast(test_hive_319 as string) as test_hive_319 + ,cast(test_hive_331 as string) as test_hive_331 + ,cast(test_hive_332 as string) as test_hive_332 + ,cast(test_hive_333 as string) as test_hive_333 + ,cast(test_hive_314 as string) as test_hive_314 + ,cast(test_hive_321 as string) as test_hive_321 + ,cast(test_hive_315 as string) as test_hive_315 + ,cast(test_hive_324 as string) as test_hive_324 + ,cast(test_hive_323 as string) as test_hive_323 + ,cast(test_hive_326 as string) as test_hive_326 + ,cast(test_hive_310 as string) as test_hive_310 + ,cast(test_hive_311 as string) as test_hive_311 + ,cast(test_hive_312 as string) as test_hive_312 + ,cast(test_hive_317 as string) as test_hive_317 + ,cast(test_hive_329 as string) as test_hive_329 + ,cast(test_hive_330 as string) as test_hive_330 + ,cast(test_hive_309 as string) as test_hive_309 + ,cast(test_hive_1290 as double) as test_hive_1290 + ,cast(test_hive_1290_lag as double) as test_hive_1290_lag + ,cast(test_hive_1290_mil as double) as test_hive_1290_mil + ,cast(test_hive_1290_lag_mil as double) as test_hive_1290_lag_mil + ,cast(test_hive_1290_bp as double) as test_hive_1290_bp + ,cast(test_hive_1290_bp_lag as double) as test_hive_1290_bp_lag + ,cast(test_hive_1290_con as double) as test_hive_1290_con + 
,cast(test_hive_1290_con_lag as double) as test_hive_1290_con_lag + ,cast(from_unixtime(unix_timestamp(test_hive_1298,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1298 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1302 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1302 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1304 +POSTHOOK: query: create view if not exists test_hive_1304 +as +select + cast(test_hive_1288 as int) as test_hive_1288 + ,cast(test_hive_1287 as int) as test_hive_1287 + ,cast(test_hive_1289 as int) as test_hive_1289 + ,cast(test_hive_1282 as string) as test_hive_1282 + ,cast(test_hive_1285 as string) as test_hive_1285 + ,cast(test_hive_1283 as string) as test_hive_1283 + ,cast(test_hive_12832 as string) as test_hive_12832 + ,cast(test_hive_1286 as string) as test_hive_1286 + ,cast(test_hive_328 as string) as test_hive_328 + ,cast(test_hive_316 as string) as test_hive_316 + ,cast(test_hive_322 as string) as test_hive_322 + ,cast(test_hive_327 as string) as test_hive_327 + ,cast(test_hive_325 as string) as test_hive_325 + ,cast(test_hive_313 as string) as test_hive_313 + ,cast(test_hive_320 as string) as test_hive_320 + ,cast(test_hive_318 as string) as test_hive_318 + ,cast(test_hive_319 as string) as test_hive_319 + ,cast(test_hive_331 as string) as test_hive_331 + ,cast(test_hive_332 as string) as test_hive_332 + ,cast(test_hive_333 as string) as test_hive_333 + ,cast(test_hive_314 as string) as test_hive_314 + ,cast(test_hive_321 as string) as test_hive_321 + ,cast(test_hive_315 as string) as test_hive_315 + ,cast(test_hive_324 as string) as test_hive_324 + ,cast(test_hive_323 as string) as test_hive_323 + ,cast(test_hive_326 as string) as test_hive_326 + ,cast(test_hive_310 as string) as test_hive_310 + ,cast(test_hive_311 as string) as test_hive_311 + ,cast(test_hive_312 as string) as test_hive_312 + ,cast(test_hive_317 as string) as test_hive_317 + ,cast(test_hive_329 as string) as test_hive_329 + ,cast(test_hive_330 as string) as test_hive_330 + ,cast(test_hive_309 as string) as test_hive_309 + ,cast(test_hive_1290 as double) as test_hive_1290 + ,cast(test_hive_1290_lag as double) as test_hive_1290_lag + ,cast(test_hive_1290_mil as double) as test_hive_1290_mil + ,cast(test_hive_1290_lag_mil as double) as test_hive_1290_lag_mil + ,cast(test_hive_1290_bp as double) as test_hive_1290_bp + ,cast(test_hive_1290_bp_lag as double) as test_hive_1290_bp_lag + ,cast(test_hive_1290_con as double) as test_hive_1290_con + ,cast(test_hive_1290_con_lag as double) as test_hive_1290_con_lag + ,cast(from_unixtime(unix_timestamp(test_hive_1298,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1298 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1302 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1302 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1304 +POSTHOOK: Lineage: test_hive_1304.creation_date EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.ds EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.ds_ts SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:ds_ts, type:bigint, 
comment:null), ] +POSTHOOK: Lineage: test_hive_1304.source_file_name SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_1282 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1282, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_1283 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1283, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_12832 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_12832, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_1285 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1285, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_1286 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1286, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_1287 EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1287, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_1288 EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1288, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_1289 EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1289, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_1290 EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_1290_bp EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290_bp, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_1290_bp_lag EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290_bp_lag, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_1290_con EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290_con, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_1290_con_lag EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290_con_lag, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_1290_lag EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290_lag, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_1290_lag_mil EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290_lag_mil, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_1290_mil EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290_mil, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_1298 EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1298, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_309 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_309, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_310 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_310, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_311 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_311, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_312 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_312, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1304.test_hive_313 SIMPLE 
[(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_313, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1304.test_hive_314 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_314, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1304.test_hive_315 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_315, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1304.test_hive_316 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_316, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1304.test_hive_317 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_317, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1304.test_hive_318 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_318, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1304.test_hive_319 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_319, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1304.test_hive_320 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_320, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1304.test_hive_321 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_321, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1304.test_hive_322 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_322, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1304.test_hive_323 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_323, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1304.test_hive_324 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_324, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1304.test_hive_325 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_325, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1304.test_hive_326 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_326, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1304.test_hive_327 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_327, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1304.test_hive_328 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_328, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1304.test_hive_329 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_329, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1304.test_hive_330 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_330, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1304.test_hive_331 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_331, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1304.test_hive_332 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_332, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1304.test_hive_333 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_333, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1304.ts EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1303
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1303
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1303
+as
+select
+ test_hive_1288 as test_hive_1288
+ ,test_hive_1287 as test_hive_1287
+ ,test_hive_1289 as test_hive_1289
+ ,test_hive_1282 as test_hive_1282
+ ,test_hive_1285 as test_hive_1285
+ ,test_hive_1283 as test_hive_1283
+ ,test_hive_12832 as test_hive_12832
+ ,test_hive_1286 as test_hive_1286
+ ,test_hive_328 as test_hive_328
+ ,test_hive_316 as test_hive_316
+ ,test_hive_322 as test_hive_322
+ ,test_hive_327 as test_hive_327
+ ,test_hive_325 as test_hive_325
+ ,test_hive_313 as test_hive_313
+ ,test_hive_320 as test_hive_320
+ ,test_hive_318 as test_hive_318
+ ,test_hive_319 as test_hive_319
+ ,test_hive_331 as test_hive_331
+ ,test_hive_332 as test_hive_332
+ ,test_hive_333 as test_hive_333
+ ,test_hive_314 as test_hive_314
+ ,test_hive_321 as test_hive_321
+ ,test_hive_315 as test_hive_315
+ ,test_hive_324 as test_hive_324
+ ,test_hive_323 as test_hive_323
+ ,test_hive_326 as test_hive_326
+ ,test_hive_310 as test_hive_310
+ ,test_hive_311 as test_hive_311
+ ,test_hive_312 as test_hive_312
+ ,test_hive_317 as test_hive_317
+ ,test_hive_329 as test_hive_329
+ ,test_hive_330 as test_hive_330
+ ,test_hive_309 as test_hive_309
+ ,test_hive_1290 as test_hive_1290
+ ,test_hive_1290_lag as test_hive_1290_lag
+ ,test_hive_1290_mil as test_hive_1290_mil
+ ,test_hive_1290_lag_mil as test_hive_1290_lag_mil
+ ,test_hive_1290_bp as test_hive_1290_bp
+ ,test_hive_1290_bp_lag as test_hive_1290_bp_lag
+ ,test_hive_1290_con as test_hive_1290_con
+ ,test_hive_1290_con_lag as test_hive_1290_con_lag
+ ,test_hive_1298 as test_hive_1298
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1304 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1302
+PREHOOK: Input: default@test_hive_1304
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1303
+POSTHOOK: query: create view test_hive_1303
+as
+select
+ test_hive_1288 as test_hive_1288
+ ,test_hive_1287 as test_hive_1287
+ ,test_hive_1289 as test_hive_1289
+ ,test_hive_1282 as test_hive_1282
+ ,test_hive_1285 as test_hive_1285
+ ,test_hive_1283 as test_hive_1283
+ ,test_hive_12832 as test_hive_12832
+ ,test_hive_1286 as test_hive_1286
+ ,test_hive_328 as test_hive_328
+ ,test_hive_316 as test_hive_316
+ ,test_hive_322 as test_hive_322
+ ,test_hive_327 as test_hive_327
+ ,test_hive_325 as test_hive_325
+ ,test_hive_313 as test_hive_313
+ ,test_hive_320 as test_hive_320
+ ,test_hive_318 as test_hive_318
+ ,test_hive_319 as test_hive_319
+ ,test_hive_331 as test_hive_331
+ ,test_hive_332 as test_hive_332
+ ,test_hive_333 as test_hive_333
+ ,test_hive_314 as test_hive_314
+ ,test_hive_321 as test_hive_321
+ ,test_hive_315 as test_hive_315
+ ,test_hive_324 as test_hive_324
+ ,test_hive_323 as test_hive_323
+ ,test_hive_326 as test_hive_326
+ ,test_hive_310 as test_hive_310
+ ,test_hive_311 as test_hive_311
+ ,test_hive_312 as test_hive_312
+ ,test_hive_317 as test_hive_317
+ ,test_hive_329 as test_hive_329
+ ,test_hive_330 as test_hive_330
+ ,test_hive_309 as test_hive_309
+ ,test_hive_1290 as test_hive_1290
+ ,test_hive_1290_lag as test_hive_1290_lag
+ ,test_hive_1290_mil as test_hive_1290_mil
+ ,test_hive_1290_lag_mil as test_hive_1290_lag_mil
+ ,test_hive_1290_bp as test_hive_1290_bp
+ ,test_hive_1290_bp_lag as test_hive_1290_bp_lag
+ ,test_hive_1290_con as test_hive_1290_con
+ ,test_hive_1290_con_lag as test_hive_1290_con_lag
+ ,test_hive_1298 as test_hive_1298
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1304 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1302
+POSTHOOK: Input: default@test_hive_1304
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1303
+POSTHOOK: Lineage: test_hive_1303.creation_date EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.ds EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.ds_ts SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.source_file_name SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_1282 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1282, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_1283 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1283, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_12832 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_12832, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_1285 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1285, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_1286 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1286, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_1287 EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1287, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_1288 EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1288, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_1289 EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1289, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_1290 EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_1290_bp EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290_bp, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_1290_bp_lag EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290_bp_lag, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_1290_con EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290_con, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_1290_con_lag EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290_con_lag, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_1290_lag EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290_lag, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_1290_lag_mil EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290_lag_mil, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_1290_mil EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290_mil, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_1298 EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1298, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_309 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_309, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_310 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_310, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_311 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_311, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_312 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_312, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_313 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_313, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_314 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_314, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_315 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_315, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_316 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_316, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_317 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_317, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_318 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_318, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_319 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_319, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_320 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_320, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_321 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_321, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_322 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_322, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_323 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_323, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_324 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_324, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_325 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_325, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_326 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_326, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_327 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_327, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_328 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_328, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_329 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_329, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_330 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_330, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_331 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_331, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_332 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_332, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.test_hive_333 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_333, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1303.ts EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1300
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1300
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1300
+as
+select t1.*
+from test_hive_1303 t1
+inner join test_hive_1301 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1301
+PREHOOK: Input: default@test_hive_1302
+PREHOOK: Input: default@test_hive_1303
+PREHOOK: Input: default@test_hive_1304
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1300
+POSTHOOK: query: create view test_hive_1300
+as
+select t1.*
+from test_hive_1303 t1
+inner join test_hive_1301 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1301
+POSTHOOK: Input: default@test_hive_1302
+POSTHOOK: Input: default@test_hive_1303
+POSTHOOK: Input: default@test_hive_1304
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1300
+POSTHOOK: Lineage: test_hive_1300.creation_date EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.ds EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.ds_ts SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.source_file_name SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_1282 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1282, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_1283 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1283, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_12832 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_12832, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_1285 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1285, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_1286 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1286, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_1287 EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1287, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_1288 EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1288, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_1289 EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1289, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_1290 EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_1290_bp EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290_bp, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_1290_bp_lag EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290_bp_lag, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_1290_con EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290_con, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_1290_con_lag EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290_con_lag, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_1290_lag EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290_lag, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_1290_lag_mil EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290_lag_mil, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_1290_mil EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1290_mil, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_1298 EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_1298, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_309 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_309, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_310 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_310, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_311 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_311, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_312 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_312, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_313 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_313, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_314 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_314, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_315 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_315, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_316 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_316, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_317 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_317, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_318 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_318, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_319 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_319, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_320 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_320, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_321 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_321, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_322 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_322, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_323 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_323, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_324 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_324, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_325 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_325, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_326 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_326, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_327 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_327, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_328 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_328, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_329 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_329, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_330 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_330, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_331 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_331, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_332 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_332, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.test_hive_333 SIMPLE [(test_hive_1302)test_hive_1302.FieldSchema(name:test_hive_333, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1300.ts EXPRESSION [(test_hive_1302)test_hive_1302.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_2027 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_2027 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_2027
+(
+ test_hive_2021 string
+ ,test_hive_2019 string
+ ,test_hive_2022 string
+ ,test_hive_458 string
+ ,test_hive_2020 string
+ ,test_hive_2025 string
+ ,test_hive_2024 string
+ ,test_hive_2023 string
+ ,test_hive_2026 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_2027
+POSTHOOK: query: create table test_hive_2027
+(
+ test_hive_2021 string
+ ,test_hive_2019 string
+ ,test_hive_2022 string
+ ,test_hive_458 string
+ ,test_hive_2020 string
+ ,test_hive_2025 string
+ ,test_hive_2024 string
+ ,test_hive_2023 string
+ ,test_hive_2026 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_2027
+PREHOOK: query: create table if not exists test_hive_2030
+(
+ test_hive_2021 string
+ ,test_hive_2019 string
+ ,test_hive_2022 string
+ ,test_hive_458 string
+ ,test_hive_2020 string
+ ,test_hive_2025 string
+ ,test_hive_2024 string
+ ,test_hive_2023 string
+ ,test_hive_2026 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_2030
+POSTHOOK: query: create table if not exists test_hive_2030
+(
+ test_hive_2021 string
+ ,test_hive_2019 string
+ ,test_hive_2022 string
+ ,test_hive_458 string
+ ,test_hive_2020 string
+ ,test_hive_2025 string
+ ,test_hive_2024 string
+ ,test_hive_2023 string
+ ,test_hive_2026 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_2030
+PREHOOK: query: drop table if exists test_hive_2029 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_2029 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_2029
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_2029
+POSTHOOK: query: create table if not exists test_hive_2029
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_2029
+PREHOOK: query: drop view if exists test_hive_2032
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_2032
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_2032
+as
+select
+ cast(test_hive_2021 as int) as test_hive_2021
+ ,cast(test_hive_2019 as int) as test_hive_2019
+ ,cast(test_hive_2022 as int) as test_hive_2022
+ ,cast(test_hive_458 as string) as test_hive_458
+ ,cast(test_hive_2020 as string) as test_hive_2020
+ ,cast(test_hive_2025 as string) as test_hive_2025
+ ,cast(test_hive_2024 as string) as test_hive_2024
+ ,cast(test_hive_2023 as string) as test_hive_2023
+ ,cast(from_unixtime(unix_timestamp(test_hive_2026,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_2026
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_2030
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_2030
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_2032
+POSTHOOK: query: create view if not exists test_hive_2032
+as
+select
+ cast(test_hive_2021 as int) as test_hive_2021
+ ,cast(test_hive_2019 as int) as test_hive_2019
+ ,cast(test_hive_2022 as int) as test_hive_2022
+ ,cast(test_hive_458 as string) as test_hive_458
+ ,cast(test_hive_2020 as string) as test_hive_2020
+ ,cast(test_hive_2025 as string) as test_hive_2025
+ ,cast(test_hive_2024 as string) as test_hive_2024
+ ,cast(test_hive_2023 as string) as test_hive_2023
+ ,cast(from_unixtime(unix_timestamp(test_hive_2026,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_2026
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_2030
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_2030
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_2032
+POSTHOOK: Lineage: test_hive_2032.creation_date EXPRESSION [(test_hive_2030)test_hive_2030.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2032.ds EXPRESSION [(test_hive_2030)test_hive_2030.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_2032.ds_ts SIMPLE [(test_hive_2030)test_hive_2030.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_2032.source_file_name SIMPLE [(test_hive_2030)test_hive_2030.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2032.test_hive_2019 EXPRESSION [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2019, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2032.test_hive_2020 SIMPLE [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2020, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2032.test_hive_2021 EXPRESSION [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2021, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2032.test_hive_2022 EXPRESSION [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2022, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2032.test_hive_2023 SIMPLE [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2023, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2032.test_hive_2024 SIMPLE [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2024, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2032.test_hive_2025 SIMPLE [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2025, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2032.test_hive_2026 EXPRESSION [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2026, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2032.test_hive_458 SIMPLE [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_458, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2032.ts EXPRESSION [(test_hive_2030)test_hive_2030.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_2031
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_2031
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_2031
+as
+select
+ test_hive_2021 as test_hive_2021
+ ,test_hive_2019 as test_hive_2019
+ ,test_hive_2022 as test_hive_2022
+ ,test_hive_458 as test_hive_458
+ ,test_hive_2020 as test_hive_2020
+ ,test_hive_2025 as test_hive_2025
+ ,test_hive_2024 as test_hive_2024
+ ,test_hive_2023 as test_hive_2023
+ ,test_hive_2026 as test_hive_2026
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_2032 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_2030
+PREHOOK: Input: default@test_hive_2032
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_2031
+POSTHOOK: query: create view test_hive_2031
+as
+select
+ test_hive_2021 as test_hive_2021
+ ,test_hive_2019 as test_hive_2019
+ ,test_hive_2022 as test_hive_2022
+ ,test_hive_458 as test_hive_458
+ ,test_hive_2020 as test_hive_2020
+ ,test_hive_2025 as test_hive_2025
+ ,test_hive_2024 as test_hive_2024
+ ,test_hive_2023 as test_hive_2023
+ ,test_hive_2026 as test_hive_2026
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_2032 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_2030
+POSTHOOK: Input: default@test_hive_2032
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_2031
+POSTHOOK: Lineage: test_hive_2031.creation_date EXPRESSION [(test_hive_2030)test_hive_2030.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2031.ds EXPRESSION [(test_hive_2030)test_hive_2030.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_2031.ds_ts SIMPLE [(test_hive_2030)test_hive_2030.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_2031.source_file_name SIMPLE [(test_hive_2030)test_hive_2030.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2031.test_hive_2019 EXPRESSION [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2019, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2031.test_hive_2020 SIMPLE [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2020, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2031.test_hive_2021 EXPRESSION [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2021, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2031.test_hive_2022 EXPRESSION [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2022, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2031.test_hive_2023 SIMPLE [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2023, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2031.test_hive_2024 SIMPLE [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2024, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2031.test_hive_2025 SIMPLE [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2025, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2031.test_hive_2026 EXPRESSION [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2026, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2031.test_hive_458 SIMPLE [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_458, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2031.ts EXPRESSION [(test_hive_2030)test_hive_2030.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_2028
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_2028
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_2028
+as
+select t1.*
+from test_hive_2031 t1
+inner join test_hive_2029 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_2029
+PREHOOK: Input: default@test_hive_2030
+PREHOOK: Input: default@test_hive_2031
+PREHOOK: Input: default@test_hive_2032
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_2028
+POSTHOOK: query: create view test_hive_2028
+as
+select t1.*
+from test_hive_2031 t1
+inner join test_hive_2029 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_2029
+POSTHOOK: Input: default@test_hive_2030
+POSTHOOK: Input: default@test_hive_2031
+POSTHOOK: Input: default@test_hive_2032
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_2028
+POSTHOOK: Lineage: test_hive_2028.creation_date EXPRESSION [(test_hive_2030)test_hive_2030.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2028.ds EXPRESSION [(test_hive_2030)test_hive_2030.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_2028.ds_ts SIMPLE [(test_hive_2030)test_hive_2030.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_2028.source_file_name SIMPLE [(test_hive_2030)test_hive_2030.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2028.test_hive_2019 EXPRESSION [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2019, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2028.test_hive_2020 SIMPLE [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2020, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2028.test_hive_2021 EXPRESSION [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2021, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2028.test_hive_2022 EXPRESSION [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2022, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2028.test_hive_2023 SIMPLE [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2023, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2028.test_hive_2024 SIMPLE [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2024, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2028.test_hive_2025 SIMPLE [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2025, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2028.test_hive_2026 EXPRESSION [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_2026, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2028.test_hive_458 SIMPLE [(test_hive_2030)test_hive_2030.FieldSchema(name:test_hive_458, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2028.ts EXPRESSION [(test_hive_2030)test_hive_2030.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_2013 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_2013 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_2013
+(
+ test_hive_2008 string
+ ,test_hive_2006 string
+ ,test_hive_2009 string
+ ,test_hive_457 string
+ ,test_hive_2007 string
+ ,test_hive_2011 string
+ ,test_hive_2010 string
+ ,test_hive_2012 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_2013
+POSTHOOK: query: create table test_hive_2013
+(
+ test_hive_2008 string
+ ,test_hive_2006 string
+ ,test_hive_2009 string
+ ,test_hive_457 string
+ ,test_hive_2007 string
+ ,test_hive_2011 string
+ ,test_hive_2010 string
+ ,test_hive_2012 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_2013
+PREHOOK: query: create table if not exists test_hive_2016
+(
+ test_hive_2008 string
+ ,test_hive_2006 string
+ ,test_hive_2009 string
+ ,test_hive_457 string
+ ,test_hive_2007 string
+ ,test_hive_2011 string
+ ,test_hive_2010 string
+ ,test_hive_2012 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_2016
+POSTHOOK: query: create table if not exists test_hive_2016
+(
+ test_hive_2008 string
+ ,test_hive_2006 string
+ ,test_hive_2009 string
+ ,test_hive_457 string
+ ,test_hive_2007 string
+ ,test_hive_2011 string
+ ,test_hive_2010 string
+ ,test_hive_2012 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_2016
+PREHOOK: query: drop table if exists test_hive_2015 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_2015 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_2015
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_2015
+POSTHOOK: query: create table if not exists test_hive_2015
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_2015
+PREHOOK: query: drop view if exists test_hive_2018
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_2018
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_2018
+as
+select
+ cast(test_hive_2008 as int) as test_hive_2008
+ ,cast(test_hive_2006 as int) as test_hive_2006
+ ,cast(test_hive_2009 as int) as test_hive_2009
+ ,cast(test_hive_457 as string) as test_hive_457
+ ,cast(test_hive_2007 as string) as test_hive_2007
+ ,cast(test_hive_2011 as string) as test_hive_2011
+ ,cast(test_hive_2010 as string) as test_hive_2010
+ ,cast(from_unixtime(unix_timestamp(test_hive_2012,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_2012
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_2016
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_2016
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_2018
+POSTHOOK: query: create view if not exists test_hive_2018
+as
+select
+ cast(test_hive_2008 as int) as test_hive_2008
+ ,cast(test_hive_2006 as int) as test_hive_2006
+ ,cast(test_hive_2009 as int) as test_hive_2009
+ ,cast(test_hive_457 as string) as test_hive_457
+ ,cast(test_hive_2007 as string) as test_hive_2007
+ ,cast(test_hive_2011 as string) as test_hive_2011
+ ,cast(test_hive_2010 as string) as test_hive_2010
+ ,cast(from_unixtime(unix_timestamp(test_hive_2012,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_2012
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_2016
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_2016
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_2018
+POSTHOOK: Lineage: test_hive_2018.creation_date EXPRESSION [(test_hive_2016)test_hive_2016.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2018.ds EXPRESSION [(test_hive_2016)test_hive_2016.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_2018.ds_ts SIMPLE [(test_hive_2016)test_hive_2016.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_2018.source_file_name SIMPLE [(test_hive_2016)test_hive_2016.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2018.test_hive_2006 EXPRESSION [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_2006, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2018.test_hive_2007 SIMPLE [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_2007, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2018.test_hive_2008 EXPRESSION [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_2008, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2018.test_hive_2009 EXPRESSION [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_2009, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2018.test_hive_2010 SIMPLE [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_2010, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2018.test_hive_2011 SIMPLE [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_2011, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2018.test_hive_2012 EXPRESSION [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_2012, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2018.test_hive_457 SIMPLE [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_457, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2018.ts EXPRESSION [(test_hive_2016)test_hive_2016.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_2017
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_2017
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_2017
+as
+select
+ test_hive_2008 as test_hive_2008
+ ,test_hive_2006 as test_hive_2006
+ ,test_hive_2009 as test_hive_2009
+ ,test_hive_457 as test_hive_457
+ ,test_hive_2007 as test_hive_2007
+ ,test_hive_2011 as test_hive_2011
+ ,test_hive_2010 as test_hive_2010
+ ,test_hive_2012 as test_hive_2012
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_2018 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_2016
+PREHOOK: Input: default@test_hive_2018
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_2017
+POSTHOOK: query: create view test_hive_2017
+as
+select
+ test_hive_2008 as test_hive_2008
+ ,test_hive_2006 as test_hive_2006
+ ,test_hive_2009 as test_hive_2009
+ ,test_hive_457 as test_hive_457
+ ,test_hive_2007 as test_hive_2007
+ ,test_hive_2011 as test_hive_2011
+ ,test_hive_2010 as test_hive_2010
+ ,test_hive_2012 as test_hive_2012
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_2018 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_2016
+POSTHOOK: Input: default@test_hive_2018
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_2017
+POSTHOOK: Lineage: test_hive_2017.creation_date EXPRESSION [(test_hive_2016)test_hive_2016.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2017.ds EXPRESSION [(test_hive_2016)test_hive_2016.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_2017.ds_ts SIMPLE [(test_hive_2016)test_hive_2016.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_2017.source_file_name SIMPLE [(test_hive_2016)test_hive_2016.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2017.test_hive_2006 EXPRESSION [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_2006, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2017.test_hive_2007 SIMPLE [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_2007, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2017.test_hive_2008 EXPRESSION [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_2008, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2017.test_hive_2009 EXPRESSION [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_2009, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2017.test_hive_2010 SIMPLE [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_2010, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2017.test_hive_2011 SIMPLE [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_2011, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2017.test_hive_2012 EXPRESSION [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_2012, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2017.test_hive_457 SIMPLE [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_457, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2017.ts EXPRESSION [(test_hive_2016)test_hive_2016.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_2014
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_2014
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_2014
+as
+select t1.*
+from test_hive_2017 t1
+inner join test_hive_2015 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_2015
+PREHOOK: Input: default@test_hive_2016
+PREHOOK: Input: default@test_hive_2017
+PREHOOK: Input: default@test_hive_2018
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_2014
+POSTHOOK: query: create view test_hive_2014
+as
+select t1.*
+from test_hive_2017 t1
+inner join test_hive_2015 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_2015
+POSTHOOK: Input: default@test_hive_2016
+POSTHOOK: Input: default@test_hive_2017
+POSTHOOK: Input: default@test_hive_2018
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_2014
+POSTHOOK: Lineage: test_hive_2014.creation_date EXPRESSION [(test_hive_2016)test_hive_2016.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2014.ds EXPRESSION [(test_hive_2016)test_hive_2016.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_2014.ds_ts SIMPLE [(test_hive_2016)test_hive_2016.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_2014.source_file_name SIMPLE [(test_hive_2016)test_hive_2016.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2014.test_hive_2006 EXPRESSION [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_2006, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2014.test_hive_2007 SIMPLE [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_2007, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2014.test_hive_2008 EXPRESSION [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_2008, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2014.test_hive_2009 EXPRESSION [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_2009, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2014.test_hive_2010 SIMPLE [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_2010, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2014.test_hive_2011 SIMPLE [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_2011, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2014.test_hive_2012 EXPRESSION [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_2012, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2014.test_hive_457 SIMPLE [(test_hive_2016)test_hive_2016.FieldSchema(name:test_hive_457, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2014.ts EXPRESSION [(test_hive_2016)test_hive_2016.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_2000 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_2000 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_2000
+(
+ test_hive_1996 string
+ ,test_hive_1994 string
+ ,test_hive_1997 string
+ ,test_hive_456 string
+ ,test_hive_1995 string
+ ,test_hive_1998 string
+ ,test_hive_1999 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_2000
+POSTHOOK: query: create table test_hive_2000
+(
+ test_hive_1996 string
+ ,test_hive_1994 string
+ ,test_hive_1997 string
+ ,test_hive_456 string
+ ,test_hive_1995 string
+ ,test_hive_1998 string
+ ,test_hive_1999 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_2000
+PREHOOK: query: create table if not exists test_hive_2003
+(
+ test_hive_1996 string
+ ,test_hive_1994 string
+ ,test_hive_1997 string
+ ,test_hive_456 string
+ ,test_hive_1995 string
+ ,test_hive_1998 string
+ ,test_hive_1999 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_2003
+POSTHOOK: query: create table if not exists test_hive_2003
+(
+ test_hive_1996 string
+ ,test_hive_1994 string
+ ,test_hive_1997 string
+ ,test_hive_456 string
+ ,test_hive_1995 string
+ ,test_hive_1998 string
+ ,test_hive_1999 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_2003
+PREHOOK: query: drop table if exists test_hive_2002 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_2002 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_2002
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_2002
+POSTHOOK: query: create table if not exists test_hive_2002
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_2002
+PREHOOK: query: drop view if exists test_hive_2005
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_2005
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_2005
+as
+select
+ cast(test_hive_1996 as int) as test_hive_1996
+ ,cast(test_hive_1994 as int) as test_hive_1994
+ ,cast(test_hive_1997 as int) as test_hive_1997
+ ,cast(test_hive_456 as string) as test_hive_456
+ ,cast(test_hive_1995 as string) as test_hive_1995
+ ,cast(test_hive_1998 as string) as test_hive_1998
+ ,cast(from_unixtime(unix_timestamp(test_hive_1999,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1999
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_2003
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_2003
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_2005
+POSTHOOK: query: create view if not exists test_hive_2005
+as
+select
+ cast(test_hive_1996 as int) as test_hive_1996
+ ,cast(test_hive_1994 as int) as test_hive_1994
+ ,cast(test_hive_1997 as int) as test_hive_1997
+ ,cast(test_hive_456 as string) as test_hive_456
+ ,cast(test_hive_1995 as string) as test_hive_1995
+ ,cast(test_hive_1998 as string) as test_hive_1998
+ ,cast(from_unixtime(unix_timestamp(test_hive_1999,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1999
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_2003
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_2003
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_2005
+POSTHOOK: Lineage: test_hive_2005.creation_date EXPRESSION [(test_hive_2003)test_hive_2003.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2005.ds EXPRESSION [(test_hive_2003)test_hive_2003.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_2005.ds_ts SIMPLE [(test_hive_2003)test_hive_2003.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_2005.source_file_name SIMPLE [(test_hive_2003)test_hive_2003.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2005.test_hive_1994 EXPRESSION [(test_hive_2003)test_hive_2003.FieldSchema(name:test_hive_1994, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2005.test_hive_1995 SIMPLE [(test_hive_2003)test_hive_2003.FieldSchema(name:test_hive_1995, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2005.test_hive_1996 EXPRESSION [(test_hive_2003)test_hive_2003.FieldSchema(name:test_hive_1996, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2005.test_hive_1997 EXPRESSION [(test_hive_2003)test_hive_2003.FieldSchema(name:test_hive_1997, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2005.test_hive_1998 SIMPLE [(test_hive_2003)test_hive_2003.FieldSchema(name:test_hive_1998, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2005.test_hive_1999 EXPRESSION [(test_hive_2003)test_hive_2003.FieldSchema(name:test_hive_1999, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2005.test_hive_456 SIMPLE [(test_hive_2003)test_hive_2003.FieldSchema(name:test_hive_456, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2005.ts EXPRESSION [(test_hive_2003)test_hive_2003.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_2004
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_2004
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_2004
+as
+select
+ test_hive_1996 as test_hive_1996
+ ,test_hive_1994 as test_hive_1994
+ ,test_hive_1997 as test_hive_1997
+ ,test_hive_456 as test_hive_456
+ ,test_hive_1995 as test_hive_1995
+ ,test_hive_1998 as test_hive_1998
+ ,test_hive_1999 as test_hive_1999
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_2005 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_2003
+PREHOOK: Input: default@test_hive_2005
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_2004
+POSTHOOK: query: create view test_hive_2004
+as
+select
+ test_hive_1996 as test_hive_1996
+ ,test_hive_1994 as test_hive_1994
+ ,test_hive_1997 as test_hive_1997
+ ,test_hive_456 as test_hive_456
+ ,test_hive_1995 as test_hive_1995
+ ,test_hive_1998 as test_hive_1998
+ ,test_hive_1999 as test_hive_1999
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_2005 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_2003
+POSTHOOK: Input: default@test_hive_2005
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_2004
+POSTHOOK: Lineage: test_hive_2004.creation_date EXPRESSION [(test_hive_2003)test_hive_2003.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2004.ds EXPRESSION [(test_hive_2003)test_hive_2003.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_2004.ds_ts SIMPLE [(test_hive_2003)test_hive_2003.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_2004.source_file_name SIMPLE [(test_hive_2003)test_hive_2003.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2004.test_hive_1994 EXPRESSION [(test_hive_2003)test_hive_2003.FieldSchema(name:test_hive_1994, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2004.test_hive_1995 SIMPLE [(test_hive_2003)test_hive_2003.FieldSchema(name:test_hive_1995, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2004.test_hive_1996 EXPRESSION [(test_hive_2003)test_hive_2003.FieldSchema(name:test_hive_1996, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2004.test_hive_1997 EXPRESSION [(test_hive_2003)test_hive_2003.FieldSchema(name:test_hive_1997, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2004.test_hive_1998 SIMPLE [(test_hive_2003)test_hive_2003.FieldSchema(name:test_hive_1998, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2004.test_hive_1999 EXPRESSION [(test_hive_2003)test_hive_2003.FieldSchema(name:test_hive_1999, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2004.test_hive_456 SIMPLE [(test_hive_2003)test_hive_2003.FieldSchema(name:test_hive_456, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2004.ts EXPRESSION [(test_hive_2003)test_hive_2003.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_2001
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_2001
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_2001
+as
+select t1.*
+from test_hive_2004 t1
+inner join test_hive_2002 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_2002
+PREHOOK: Input: default@test_hive_2003
+PREHOOK: Input: default@test_hive_2004
+PREHOOK: Input: default@test_hive_2005
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_2001
+POSTHOOK: query: create view test_hive_2001
+as
+select t1.*
+from test_hive_2004 t1
+inner join test_hive_2002 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_2002
+POSTHOOK: Input: default@test_hive_2003
+POSTHOOK: Input: default@test_hive_2004
+POSTHOOK: Input: default@test_hive_2005
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_2001
+POSTHOOK: Lineage: test_hive_2001.creation_date EXPRESSION [(test_hive_2003)test_hive_2003.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2001.ds EXPRESSION [(test_hive_2003)test_hive_2003.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_2001.ds_ts SIMPLE [(test_hive_2003)test_hive_2003.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_2001.source_file_name SIMPLE [(test_hive_2003)test_hive_2003.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2001.test_hive_1994 EXPRESSION [(test_hive_2003)test_hive_2003.FieldSchema(name:test_hive_1994, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2001.test_hive_1995 SIMPLE [(test_hive_2003)test_hive_2003.FieldSchema(name:test_hive_1995, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2001.test_hive_1996 EXPRESSION [(test_hive_2003)test_hive_2003.FieldSchema(name:test_hive_1996, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2001.test_hive_1997 EXPRESSION [(test_hive_2003)test_hive_2003.FieldSchema(name:test_hive_1997, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2001.test_hive_1998 SIMPLE [(test_hive_2003)test_hive_2003.FieldSchema(name:test_hive_1998, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2001.test_hive_1999 EXPRESSION [(test_hive_2003)test_hive_2003.FieldSchema(name:test_hive_1999, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2001.test_hive_456 SIMPLE [(test_hive_2003)test_hive_2003.FieldSchema(name:test_hive_456, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2001.ts EXPRESSION [(test_hive_2003)test_hive_2003.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1988 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1988 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1988
+(
+ test_hive_1984 string
+ ,test_hive_1982 string
+ ,test_hive_1985 string
+ ,test_hive_455 string
+ ,test_hive_1983 string
+ ,test_hive_1986 string
+ ,test_hive_1987 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1988
+POSTHOOK: query: create table test_hive_1988
+(
+ test_hive_1984 string
+ ,test_hive_1982 string
+ ,test_hive_1985 string
+ ,test_hive_455 string
+ ,test_hive_1983 string
+ ,test_hive_1986 string
+ ,test_hive_1987 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1988
+PREHOOK: query: create table if not exists test_hive_1991
+(
+ test_hive_1984 string
+ ,test_hive_1982 string
+ ,test_hive_1985 string
+ ,test_hive_455 string
+ ,test_hive_1983 string
+ ,test_hive_1986 string
+ ,test_hive_1987 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1991
+POSTHOOK: query: create table if not exists test_hive_1991
+(
+ test_hive_1984 string
+ ,test_hive_1982 string
+ ,test_hive_1985 string
+ ,test_hive_455 string
+ ,test_hive_1983 string
+ ,test_hive_1986 string
+ ,test_hive_1987 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1991
+PREHOOK: query: drop table if exists test_hive_1990 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1990 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1990
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1990
+POSTHOOK: query: create table if not exists test_hive_1990
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1990
+PREHOOK: query: drop view if exists test_hive_1993
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1993
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1993
+as
+select
+ cast(test_hive_1984 as int) as test_hive_1984
+ ,cast(test_hive_1982 as int) as test_hive_1982
+ ,cast(test_hive_1985 as int) as test_hive_1985
+ ,cast(test_hive_455 as string) as test_hive_455
+ ,cast(test_hive_1983 as string) as test_hive_1983
+ ,cast(test_hive_1986 as string) as test_hive_1986
+ ,cast(from_unixtime(unix_timestamp(test_hive_1987,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1987
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1991
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1991
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1993
+POSTHOOK: query: create view if not exists test_hive_1993
+as
+select
+ cast(test_hive_1984 as int) as test_hive_1984
+ ,cast(test_hive_1982 as int) as test_hive_1982
+ ,cast(test_hive_1985 as int) as test_hive_1985
+ ,cast(test_hive_455 as string) as test_hive_455
+ ,cast(test_hive_1983 as string) as test_hive_1983
+ ,cast(test_hive_1986 as string) as test_hive_1986
+ ,cast(from_unixtime(unix_timestamp(test_hive_1987,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1987
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1991
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1991
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1993
+POSTHOOK: Lineage: test_hive_1993.creation_date EXPRESSION [(test_hive_1991)test_hive_1991.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1993.ds EXPRESSION [(test_hive_1991)test_hive_1991.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1993.ds_ts SIMPLE [(test_hive_1991)test_hive_1991.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1993.source_file_name SIMPLE [(test_hive_1991)test_hive_1991.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1993.test_hive_1982 EXPRESSION [(test_hive_1991)test_hive_1991.FieldSchema(name:test_hive_1982, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1993.test_hive_1983 SIMPLE [(test_hive_1991)test_hive_1991.FieldSchema(name:test_hive_1983, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1993.test_hive_1984 EXPRESSION [(test_hive_1991)test_hive_1991.FieldSchema(name:test_hive_1984, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1993.test_hive_1985 EXPRESSION [(test_hive_1991)test_hive_1991.FieldSchema(name:test_hive_1985, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1993.test_hive_1986 SIMPLE [(test_hive_1991)test_hive_1991.FieldSchema(name:test_hive_1986, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1993.test_hive_1987 EXPRESSION [(test_hive_1991)test_hive_1991.FieldSchema(name:test_hive_1987, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1993.test_hive_455 SIMPLE [(test_hive_1991)test_hive_1991.FieldSchema(name:test_hive_455, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1993.ts EXPRESSION [(test_hive_1991)test_hive_1991.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1992
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1992
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1992
+as
+select
+ test_hive_1984 as test_hive_1984
+ ,test_hive_1982 as test_hive_1982
+ ,test_hive_1985 as test_hive_1985
+ ,test_hive_455 as test_hive_455
+ ,test_hive_1983 as test_hive_1983
+ ,test_hive_1986 as test_hive_1986
+ ,test_hive_1987 as test_hive_1987
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1993 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1991
+PREHOOK: Input: default@test_hive_1993
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1992
+POSTHOOK: query: create view test_hive_1992
+as
+select
+ test_hive_1984 as test_hive_1984
+ ,test_hive_1982 as test_hive_1982
+ ,test_hive_1985 as test_hive_1985
+ ,test_hive_455 as test_hive_455
+ ,test_hive_1983 as test_hive_1983
+ ,test_hive_1986 as test_hive_1986
+ ,test_hive_1987 as test_hive_1987
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1993 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1991
+POSTHOOK: Input: default@test_hive_1993
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1992
+POSTHOOK: Lineage: test_hive_1992.creation_date EXPRESSION [(test_hive_1991)test_hive_1991.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1992.ds EXPRESSION [(test_hive_1991)test_hive_1991.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1992.ds_ts SIMPLE [(test_hive_1991)test_hive_1991.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1992.source_file_name SIMPLE [(test_hive_1991)test_hive_1991.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1992.test_hive_1982 EXPRESSION [(test_hive_1991)test_hive_1991.FieldSchema(name:test_hive_1982, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1992.test_hive_1983 SIMPLE [(test_hive_1991)test_hive_1991.FieldSchema(name:test_hive_1983, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1992.test_hive_1984 EXPRESSION [(test_hive_1991)test_hive_1991.FieldSchema(name:test_hive_1984, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1992.test_hive_1985 EXPRESSION [(test_hive_1991)test_hive_1991.FieldSchema(name:test_hive_1985, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1992.test_hive_1986 SIMPLE [(test_hive_1991)test_hive_1991.FieldSchema(name:test_hive_1986, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1992.test_hive_1987 EXPRESSION [(test_hive_1991)test_hive_1991.FieldSchema(name:test_hive_1987, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1992.test_hive_455 SIMPLE [(test_hive_1991)test_hive_1991.FieldSchema(name:test_hive_455, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1992.ts EXPRESSION [(test_hive_1991)test_hive_1991.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1989
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1989
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1989
+as
+select t1.*
+from test_hive_1992 t1
+inner join test_hive_1990 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1990
+PREHOOK: Input: default@test_hive_1991
+PREHOOK: Input: default@test_hive_1992
+PREHOOK: Input: default@test_hive_1993
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1989
+POSTHOOK: query: create view test_hive_1989
+as
+select t1.*
+from test_hive_1992 t1
+inner join test_hive_1990 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1990
+POSTHOOK: Input: default@test_hive_1991
+POSTHOOK: Input: default@test_hive_1992
+POSTHOOK: Input: default@test_hive_1993
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1989
+POSTHOOK: Lineage: test_hive_1989.creation_date EXPRESSION [(test_hive_1991)test_hive_1991.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1989.ds EXPRESSION [(test_hive_1991)test_hive_1991.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1989.ds_ts SIMPLE [(test_hive_1991)test_hive_1991.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1989.source_file_name SIMPLE [(test_hive_1991)test_hive_1991.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1989.test_hive_1982 EXPRESSION [(test_hive_1991)test_hive_1991.FieldSchema(name:test_hive_1982, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1989.test_hive_1983 SIMPLE [(test_hive_1991)test_hive_1991.FieldSchema(name:test_hive_1983, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1989.test_hive_1984 EXPRESSION [(test_hive_1991)test_hive_1991.FieldSchema(name:test_hive_1984, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1989.test_hive_1985 EXPRESSION [(test_hive_1991)test_hive_1991.FieldSchema(name:test_hive_1985, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1989.test_hive_1986 SIMPLE [(test_hive_1991)test_hive_1991.FieldSchema(name:test_hive_1986, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1989.test_hive_1987 EXPRESSION [(test_hive_1991)test_hive_1991.FieldSchema(name:test_hive_1987, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1989.test_hive_455 SIMPLE [(test_hive_1991)test_hive_1991.FieldSchema(name:test_hive_455, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1989.ts EXPRESSION [(test_hive_1991)test_hive_1991.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1976 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1976 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1976
+(
+ test_hive_1970 string
+ ,test_hive_1968 string
+ ,test_hive_1971 string
+ ,test_hive_454 string
+ ,test_hive_1969 string
+ ,test_hive_1974 string
+ ,test_hive_1973 string
+ ,test_hive_1972 string
+ ,test_hive_1975 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1976
+POSTHOOK: query: create table test_hive_1976
+(
+ test_hive_1970 string
+ ,test_hive_1968 string
+ ,test_hive_1971 string
+ ,test_hive_454 string
+ ,test_hive_1969 string
+ ,test_hive_1974 string
+ ,test_hive_1973 string
+ ,test_hive_1972 string
+ ,test_hive_1975 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1976
+PREHOOK: query: create table if not exists test_hive_1979
+(
+ test_hive_1970 string
+ ,test_hive_1968 string
+ ,test_hive_1971 string
+ ,test_hive_454 string
+ ,test_hive_1969 string
+ ,test_hive_1974 string
+ ,test_hive_1973 string
+ ,test_hive_1972 string
+ ,test_hive_1975 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1979
+POSTHOOK: query: create table if not exists test_hive_1979
+(
+ test_hive_1970 string
+ ,test_hive_1968 string
+ ,test_hive_1971 string
+ ,test_hive_454 string
+ ,test_hive_1969 string
+ ,test_hive_1974 string
+ ,test_hive_1973 string
+ ,test_hive_1972 string
+
,test_hive_1975 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1979 +PREHOOK: query: drop table if exists test_hive_1978 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1978 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1978 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1978 +POSTHOOK: query: create table if not exists test_hive_1978 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1978 +PREHOOK: query: drop view if exists test_hive_1981 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1981 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1981 +as +select + cast(test_hive_1970 as int) as test_hive_1970 + ,cast(test_hive_1968 as int) as test_hive_1968 + ,cast(test_hive_1971 as int) as test_hive_1971 + ,cast(test_hive_454 as string) as test_hive_454 + ,cast(test_hive_1969 as string) as test_hive_1969 + ,cast(test_hive_1974 as string) as test_hive_1974 + ,cast(test_hive_1973 as string) as test_hive_1973 + ,cast(test_hive_1972 as string) as test_hive_1972 + ,cast(from_unixtime(unix_timestamp(test_hive_1975,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1975 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1979 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1979 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1981 +POSTHOOK: query: create view if not exists test_hive_1981 +as +select + cast(test_hive_1970 as int) as test_hive_1970 + ,cast(test_hive_1968 as int) as test_hive_1968 + ,cast(test_hive_1971 as int) as test_hive_1971 + ,cast(test_hive_454 as string) as test_hive_454 + ,cast(test_hive_1969 as string) as test_hive_1969 + ,cast(test_hive_1974 as string) as test_hive_1974 + ,cast(test_hive_1973 as string) as test_hive_1973 + ,cast(test_hive_1972 as string) as test_hive_1972 + ,cast(from_unixtime(unix_timestamp(test_hive_1975,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1975 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1979 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1979 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1981 +POSTHOOK: Lineage: test_hive_1981.creation_date EXPRESSION [(test_hive_1979)test_hive_1979.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1981.ds EXPRESSION [(test_hive_1979)test_hive_1979.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1981.ds_ts SIMPLE [(test_hive_1979)test_hive_1979.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1981.source_file_name SIMPLE [(test_hive_1979)test_hive_1979.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1981.test_hive_1968 EXPRESSION [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1968, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1981.test_hive_1969 SIMPLE [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1969, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1981.test_hive_1970 EXPRESSION [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1970, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1981.test_hive_1971 EXPRESSION [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1971, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1981.test_hive_1972 SIMPLE [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1972, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1981.test_hive_1973 SIMPLE [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1973, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1981.test_hive_1974 SIMPLE [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1974, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1981.test_hive_1975 EXPRESSION [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1975, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1981.test_hive_454 SIMPLE [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_454, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1981.ts EXPRESSION [(test_hive_1979)test_hive_1979.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1980 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1980 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1980 +as +select + test_hive_1970 as test_hive_1970 + ,test_hive_1968 as test_hive_1968 + ,test_hive_1971 as test_hive_1971 + ,test_hive_454 as test_hive_454 + ,test_hive_1969 as test_hive_1969 + ,test_hive_1974 as test_hive_1974 + ,test_hive_1973 as test_hive_1973 + ,test_hive_1972 as test_hive_1972 + ,test_hive_1975 as test_hive_1975 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1981 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1979 +PREHOOK: Input: default@test_hive_1981 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1980 +POSTHOOK: query: create view test_hive_1980 +as +select + test_hive_1970 as test_hive_1970 + ,test_hive_1968 as test_hive_1968 + ,test_hive_1971 as test_hive_1971 + ,test_hive_454 as test_hive_454 + ,test_hive_1969 as test_hive_1969 + ,test_hive_1974 as test_hive_1974 + ,test_hive_1973 as test_hive_1973 + ,test_hive_1972 as test_hive_1972 + ,test_hive_1975 as test_hive_1975 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1981 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1979 +POSTHOOK: Input: default@test_hive_1981 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1980 +POSTHOOK: Lineage: test_hive_1980.creation_date EXPRESSION [(test_hive_1979)test_hive_1979.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1980.ds EXPRESSION [(test_hive_1979)test_hive_1979.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1980.ds_ts SIMPLE [(test_hive_1979)test_hive_1979.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1980.source_file_name SIMPLE [(test_hive_1979)test_hive_1979.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1980.test_hive_1968 EXPRESSION [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1968, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1980.test_hive_1969 SIMPLE [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1969, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1980.test_hive_1970 EXPRESSION [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1970, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1980.test_hive_1971 EXPRESSION [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1971, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1980.test_hive_1972 SIMPLE [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1972, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1980.test_hive_1973 SIMPLE [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1973, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1980.test_hive_1974 SIMPLE [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1974, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1980.test_hive_1975 EXPRESSION [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1975, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1980.test_hive_454 SIMPLE [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_454, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1980.ts EXPRESSION [(test_hive_1979)test_hive_1979.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1977 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1977 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1977 +as +select t1.* +from test_hive_1980 t1 +inner join test_hive_1978 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1978 +PREHOOK: Input: default@test_hive_1979 +PREHOOK: Input: default@test_hive_1980 +PREHOOK: Input: default@test_hive_1981 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1977 +POSTHOOK: query: create view test_hive_1977 +as +select t1.* +from test_hive_1980 t1 +inner join test_hive_1978 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1978 +POSTHOOK: Input: default@test_hive_1979 +POSTHOOK: Input: default@test_hive_1980 +POSTHOOK: Input: default@test_hive_1981 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1977 +POSTHOOK: Lineage: test_hive_1977.creation_date EXPRESSION [(test_hive_1979)test_hive_1979.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1977.ds EXPRESSION [(test_hive_1979)test_hive_1979.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1977.ds_ts SIMPLE [(test_hive_1979)test_hive_1979.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1977.source_file_name SIMPLE [(test_hive_1979)test_hive_1979.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1977.test_hive_1968 EXPRESSION [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1968, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1977.test_hive_1969 SIMPLE [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1969, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1977.test_hive_1970 EXPRESSION [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1970, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1977.test_hive_1971 EXPRESSION [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1971, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1977.test_hive_1972 SIMPLE [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1972, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1977.test_hive_1973 SIMPLE [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1973, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1977.test_hive_1974 SIMPLE [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1974, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1977.test_hive_1975 EXPRESSION [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_1975, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1977.test_hive_454 SIMPLE [(test_hive_1979)test_hive_1979.FieldSchema(name:test_hive_454, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1977.ts EXPRESSION [(test_hive_1979)test_hive_1979.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1962 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1962 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1962 +( + test_hive_1958 string + ,test_hive_1956 string + ,test_hive_1959 string + ,test_hive_453 string + ,test_hive_1957 string + ,test_hive_1960 string + ,test_hive_1961 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1962 +POSTHOOK: query: create table test_hive_1962 +( + test_hive_1958 string + ,test_hive_1956 string + ,test_hive_1959 string + ,test_hive_453 string + ,test_hive_1957 string + ,test_hive_1960 string + ,test_hive_1961 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1962 +PREHOOK: query: create table if not exists test_hive_1965 +( + test_hive_1958 string + ,test_hive_1956 string + ,test_hive_1959 string + ,test_hive_453 string + ,test_hive_1957 string + ,test_hive_1960 string + ,test_hive_1961 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1965 +POSTHOOK: query: create table if not exists test_hive_1965 +( + test_hive_1958 string + ,test_hive_1956 string + ,test_hive_1959 string + ,test_hive_453 string + ,test_hive_1957 string + ,test_hive_1960 string + ,test_hive_1961 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1965 +PREHOOK: query: drop table if exists test_hive_1964 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1964 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1964 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1964 +POSTHOOK: query: create table if not exists test_hive_1964 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1964 +PREHOOK: query: drop view if exists test_hive_1967 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop 
view if exists test_hive_1967 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1967 +as +select + cast(test_hive_1958 as int) as test_hive_1958 + ,cast(test_hive_1956 as int) as test_hive_1956 + ,cast(test_hive_1959 as int) as test_hive_1959 + ,cast(test_hive_453 as string) as test_hive_453 + ,cast(test_hive_1957 as string) as test_hive_1957 + ,cast(test_hive_1960 as string) as test_hive_1960 + ,cast(from_unixtime(unix_timestamp(test_hive_1961,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1961 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1965 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1965 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1967 +POSTHOOK: query: create view if not exists test_hive_1967 +as +select + cast(test_hive_1958 as int) as test_hive_1958 + ,cast(test_hive_1956 as int) as test_hive_1956 + ,cast(test_hive_1959 as int) as test_hive_1959 + ,cast(test_hive_453 as string) as test_hive_453 + ,cast(test_hive_1957 as string) as test_hive_1957 + ,cast(test_hive_1960 as string) as test_hive_1960 + ,cast(from_unixtime(unix_timestamp(test_hive_1961,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1961 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1965 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1965 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1967 +POSTHOOK: Lineage: test_hive_1967.creation_date EXPRESSION [(test_hive_1965)test_hive_1965.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1967.ds EXPRESSION [(test_hive_1965)test_hive_1965.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1967.ds_ts SIMPLE [(test_hive_1965)test_hive_1965.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1967.source_file_name SIMPLE [(test_hive_1965)test_hive_1965.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1967.test_hive_1956 EXPRESSION [(test_hive_1965)test_hive_1965.FieldSchema(name:test_hive_1956, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1967.test_hive_1957 SIMPLE [(test_hive_1965)test_hive_1965.FieldSchema(name:test_hive_1957, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1967.test_hive_1958 EXPRESSION [(test_hive_1965)test_hive_1965.FieldSchema(name:test_hive_1958, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1967.test_hive_1959 EXPRESSION [(test_hive_1965)test_hive_1965.FieldSchema(name:test_hive_1959, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1967.test_hive_1960 SIMPLE [(test_hive_1965)test_hive_1965.FieldSchema(name:test_hive_1960, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1967.test_hive_1961 EXPRESSION [(test_hive_1965)test_hive_1965.FieldSchema(name:test_hive_1961, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1967.test_hive_453 SIMPLE [(test_hive_1965)test_hive_1965.FieldSchema(name:test_hive_453, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1967.ts EXPRESSION [(test_hive_1965)test_hive_1965.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1966 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists 
test_hive_1966 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1966 +as +select + test_hive_1958 as test_hive_1958 + ,test_hive_1956 as test_hive_1956 + ,test_hive_1959 as test_hive_1959 + ,test_hive_453 as test_hive_453 + ,test_hive_1957 as test_hive_1957 + ,test_hive_1960 as test_hive_1960 + ,test_hive_1961 as test_hive_1961 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1967 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1965 +PREHOOK: Input: default@test_hive_1967 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1966 +POSTHOOK: query: create view test_hive_1966 +as +select + test_hive_1958 as test_hive_1958 + ,test_hive_1956 as test_hive_1956 + ,test_hive_1959 as test_hive_1959 + ,test_hive_453 as test_hive_453 + ,test_hive_1957 as test_hive_1957 + ,test_hive_1960 as test_hive_1960 + ,test_hive_1961 as test_hive_1961 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1967 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1965 +POSTHOOK: Input: default@test_hive_1967 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1966 +POSTHOOK: Lineage: test_hive_1966.creation_date EXPRESSION [(test_hive_1965)test_hive_1965.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1966.ds EXPRESSION [(test_hive_1965)test_hive_1965.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1966.ds_ts SIMPLE [(test_hive_1965)test_hive_1965.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1966.source_file_name SIMPLE [(test_hive_1965)test_hive_1965.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1966.test_hive_1956 EXPRESSION [(test_hive_1965)test_hive_1965.FieldSchema(name:test_hive_1956, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1966.test_hive_1957 SIMPLE [(test_hive_1965)test_hive_1965.FieldSchema(name:test_hive_1957, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1966.test_hive_1958 EXPRESSION [(test_hive_1965)test_hive_1965.FieldSchema(name:test_hive_1958, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1966.test_hive_1959 EXPRESSION [(test_hive_1965)test_hive_1965.FieldSchema(name:test_hive_1959, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1966.test_hive_1960 SIMPLE [(test_hive_1965)test_hive_1965.FieldSchema(name:test_hive_1960, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1966.test_hive_1961 EXPRESSION [(test_hive_1965)test_hive_1965.FieldSchema(name:test_hive_1961, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1966.test_hive_453 SIMPLE [(test_hive_1965)test_hive_1965.FieldSchema(name:test_hive_453, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1966.ts EXPRESSION [(test_hive_1965)test_hive_1965.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1963 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1963 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1963 +as +select t1.* +from test_hive_1966 t1 +inner join test_hive_1964 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1964 +PREHOOK: Input: default@test_hive_1965 +PREHOOK: Input: default@test_hive_1966 +PREHOOK: Input: default@test_hive_1967 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1963 +POSTHOOK: query: 
create view test_hive_1963 +as +select t1.* +from test_hive_1966 t1 +inner join test_hive_1964 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1964 +POSTHOOK: Input: default@test_hive_1965 +POSTHOOK: Input: default@test_hive_1966 +POSTHOOK: Input: default@test_hive_1967 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1963 +POSTHOOK: Lineage: test_hive_1963.creation_date EXPRESSION [(test_hive_1965)test_hive_1965.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1963.ds EXPRESSION [(test_hive_1965)test_hive_1965.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1963.ds_ts SIMPLE [(test_hive_1965)test_hive_1965.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1963.source_file_name SIMPLE [(test_hive_1965)test_hive_1965.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1963.test_hive_1956 EXPRESSION [(test_hive_1965)test_hive_1965.FieldSchema(name:test_hive_1956, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1963.test_hive_1957 SIMPLE [(test_hive_1965)test_hive_1965.FieldSchema(name:test_hive_1957, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1963.test_hive_1958 EXPRESSION [(test_hive_1965)test_hive_1965.FieldSchema(name:test_hive_1958, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1963.test_hive_1959 EXPRESSION [(test_hive_1965)test_hive_1965.FieldSchema(name:test_hive_1959, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1963.test_hive_1960 SIMPLE [(test_hive_1965)test_hive_1965.FieldSchema(name:test_hive_1960, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1963.test_hive_1961 EXPRESSION [(test_hive_1965)test_hive_1965.FieldSchema(name:test_hive_1961, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1963.test_hive_453 SIMPLE [(test_hive_1965)test_hive_1965.FieldSchema(name:test_hive_453, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1963.ts EXPRESSION [(test_hive_1965)test_hive_1965.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1950 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1950 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1950 +( + test_hive_1946 string + ,test_hive_1944 string + ,test_hive_1947 string + ,test_hive_452 string + ,test_hive_1945 string + ,test_hive_1948 string + ,test_hive_1949 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1950 +POSTHOOK: query: create table test_hive_1950 +( + test_hive_1946 string + ,test_hive_1944 string + ,test_hive_1947 string + ,test_hive_452 string + ,test_hive_1945 string + ,test_hive_1948 string + ,test_hive_1949 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1950 +PREHOOK: query: create table if not exists test_hive_1953 +( + test_hive_1946 string + ,test_hive_1944 string + ,test_hive_1947 string + ,test_hive_452 string + ,test_hive_1945 string + ,test_hive_1948 string + ,test_hive_1949 string + ,source_file_name string + ,creation_date 
string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1953 +POSTHOOK: query: create table if not exists test_hive_1953 +( + test_hive_1946 string + ,test_hive_1944 string + ,test_hive_1947 string + ,test_hive_452 string + ,test_hive_1945 string + ,test_hive_1948 string + ,test_hive_1949 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1953 +PREHOOK: query: drop table if exists test_hive_1952 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1952 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1952 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1952 +POSTHOOK: query: create table if not exists test_hive_1952 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1952 +PREHOOK: query: drop view if exists test_hive_1955 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1955 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1955 +as +select + cast(test_hive_1946 as int) as test_hive_1946 + ,cast(test_hive_1944 as int) as test_hive_1944 + ,cast(test_hive_1947 as int) as test_hive_1947 + ,cast(test_hive_452 as string) as test_hive_452 + ,cast(test_hive_1945 as string) as test_hive_1945 + ,cast(test_hive_1948 as string) as test_hive_1948 + ,cast(from_unixtime(unix_timestamp(test_hive_1949,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1949 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1953 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1953 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1955 +POSTHOOK: query: create view if not exists test_hive_1955 +as +select + cast(test_hive_1946 as int) as test_hive_1946 + ,cast(test_hive_1944 as int) as test_hive_1944 + ,cast(test_hive_1947 as int) as test_hive_1947 + ,cast(test_hive_452 as string) as test_hive_452 + ,cast(test_hive_1945 as string) as test_hive_1945 + ,cast(test_hive_1948 as string) as test_hive_1948 + ,cast(from_unixtime(unix_timestamp(test_hive_1949,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1949 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1953 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1953 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1955 +POSTHOOK: Lineage: test_hive_1955.creation_date EXPRESSION [(test_hive_1953)test_hive_1953.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1955.ds EXPRESSION [(test_hive_1953)test_hive_1953.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1955.ds_ts SIMPLE [(test_hive_1953)test_hive_1953.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1955.source_file_name SIMPLE [(test_hive_1953)test_hive_1953.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: 
Lineage: test_hive_1955.test_hive_1944 EXPRESSION [(test_hive_1953)test_hive_1953.FieldSchema(name:test_hive_1944, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1955.test_hive_1945 SIMPLE [(test_hive_1953)test_hive_1953.FieldSchema(name:test_hive_1945, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1955.test_hive_1946 EXPRESSION [(test_hive_1953)test_hive_1953.FieldSchema(name:test_hive_1946, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1955.test_hive_1947 EXPRESSION [(test_hive_1953)test_hive_1953.FieldSchema(name:test_hive_1947, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1955.test_hive_1948 SIMPLE [(test_hive_1953)test_hive_1953.FieldSchema(name:test_hive_1948, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1955.test_hive_1949 EXPRESSION [(test_hive_1953)test_hive_1953.FieldSchema(name:test_hive_1949, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1955.test_hive_452 SIMPLE [(test_hive_1953)test_hive_1953.FieldSchema(name:test_hive_452, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1955.ts EXPRESSION [(test_hive_1953)test_hive_1953.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1954 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1954 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1954 +as +select + test_hive_1946 as test_hive_1946 + ,test_hive_1944 as test_hive_1944 + ,test_hive_1947 as test_hive_1947 + ,test_hive_452 as test_hive_452 + ,test_hive_1945 as test_hive_1945 + ,test_hive_1948 as test_hive_1948 + ,test_hive_1949 as test_hive_1949 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1955 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1953 +PREHOOK: Input: default@test_hive_1955 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1954 +POSTHOOK: query: create view test_hive_1954 +as +select + test_hive_1946 as test_hive_1946 + ,test_hive_1944 as test_hive_1944 + ,test_hive_1947 as test_hive_1947 + ,test_hive_452 as test_hive_452 + ,test_hive_1945 as test_hive_1945 + ,test_hive_1948 as test_hive_1948 + ,test_hive_1949 as test_hive_1949 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1955 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1953 +POSTHOOK: Input: default@test_hive_1955 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1954 +POSTHOOK: Lineage: test_hive_1954.creation_date EXPRESSION [(test_hive_1953)test_hive_1953.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1954.ds EXPRESSION [(test_hive_1953)test_hive_1953.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1954.ds_ts SIMPLE [(test_hive_1953)test_hive_1953.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1954.source_file_name SIMPLE [(test_hive_1953)test_hive_1953.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1954.test_hive_1944 EXPRESSION [(test_hive_1953)test_hive_1953.FieldSchema(name:test_hive_1944, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1954.test_hive_1945 SIMPLE [(test_hive_1953)test_hive_1953.FieldSchema(name:test_hive_1945, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1954.test_hive_1946 EXPRESSION [(test_hive_1953)test_hive_1953.FieldSchema(name:test_hive_1946, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1954.test_hive_1947 EXPRESSION [(test_hive_1953)test_hive_1953.FieldSchema(name:test_hive_1947, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1954.test_hive_1948 SIMPLE [(test_hive_1953)test_hive_1953.FieldSchema(name:test_hive_1948, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1954.test_hive_1949 EXPRESSION [(test_hive_1953)test_hive_1953.FieldSchema(name:test_hive_1949, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1954.test_hive_452 SIMPLE [(test_hive_1953)test_hive_1953.FieldSchema(name:test_hive_452, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1954.ts EXPRESSION [(test_hive_1953)test_hive_1953.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1951 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1951 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1951 +as +select t1.* +from test_hive_1954 t1 +inner join test_hive_1952 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1952 +PREHOOK: Input: default@test_hive_1953 +PREHOOK: Input: default@test_hive_1954 +PREHOOK: Input: default@test_hive_1955 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1951 +POSTHOOK: query: create view test_hive_1951 +as +select t1.* +from test_hive_1954 t1 +inner join test_hive_1952 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1952 +POSTHOOK: Input: default@test_hive_1953 +POSTHOOK: Input: default@test_hive_1954 +POSTHOOK: Input: default@test_hive_1955 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1951 +POSTHOOK: Lineage: test_hive_1951.creation_date EXPRESSION [(test_hive_1953)test_hive_1953.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1951.ds EXPRESSION [(test_hive_1953)test_hive_1953.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1951.ds_ts SIMPLE [(test_hive_1953)test_hive_1953.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1951.source_file_name SIMPLE [(test_hive_1953)test_hive_1953.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1951.test_hive_1944 EXPRESSION [(test_hive_1953)test_hive_1953.FieldSchema(name:test_hive_1944, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1951.test_hive_1945 SIMPLE [(test_hive_1953)test_hive_1953.FieldSchema(name:test_hive_1945, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1951.test_hive_1946 EXPRESSION [(test_hive_1953)test_hive_1953.FieldSchema(name:test_hive_1946, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1951.test_hive_1947 EXPRESSION [(test_hive_1953)test_hive_1953.FieldSchema(name:test_hive_1947, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1951.test_hive_1948 SIMPLE [(test_hive_1953)test_hive_1953.FieldSchema(name:test_hive_1948, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1951.test_hive_1949 EXPRESSION [(test_hive_1953)test_hive_1953.FieldSchema(name:test_hive_1949, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1951.test_hive_452 SIMPLE [(test_hive_1953)test_hive_1953.FieldSchema(name:test_hive_452, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1951.ts EXPRESSION [(test_hive_1953)test_hive_1953.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1938 purge 
+PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1938 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1938 +( + test_hive_1933 string + ,test_hive_1931 string + ,test_hive_1934 string + ,test_hive_451 string + ,test_hive_1932 string + ,test_hive_1936 string + ,test_hive_1935 string + ,test_hive_1937 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1938 +POSTHOOK: query: create table test_hive_1938 +( + test_hive_1933 string + ,test_hive_1931 string + ,test_hive_1934 string + ,test_hive_451 string + ,test_hive_1932 string + ,test_hive_1936 string + ,test_hive_1935 string + ,test_hive_1937 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1938 +PREHOOK: query: create table if not exists test_hive_1941 +( + test_hive_1933 string + ,test_hive_1931 string + ,test_hive_1934 string + ,test_hive_451 string + ,test_hive_1932 string + ,test_hive_1936 string + ,test_hive_1935 string + ,test_hive_1937 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1941 +POSTHOOK: query: create table if not exists test_hive_1941 +( + test_hive_1933 string + ,test_hive_1931 string + ,test_hive_1934 string + ,test_hive_451 string + ,test_hive_1932 string + ,test_hive_1936 string + ,test_hive_1935 string + ,test_hive_1937 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1941 +PREHOOK: query: drop table if exists test_hive_1940 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1940 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1940 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1940 +POSTHOOK: query: create table if not exists test_hive_1940 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1940 +PREHOOK: query: drop view if exists test_hive_1943 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1943 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1943 +as +select + cast(test_hive_1933 as int) as test_hive_1933 + ,cast(test_hive_1931 as int) as test_hive_1931 + ,cast(test_hive_1934 as int) as test_hive_1934 + ,cast(test_hive_451 as string) as test_hive_451 + ,cast(test_hive_1932 as string) as test_hive_1932 + ,cast(test_hive_1936 as string) as test_hive_1936 + ,cast(test_hive_1935 as string) as test_hive_1935 + ,cast(from_unixtime(unix_timestamp(test_hive_1937,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1937 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1941 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1941 
+PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1943 +POSTHOOK: query: create view if not exists test_hive_1943 +as +select + cast(test_hive_1933 as int) as test_hive_1933 + ,cast(test_hive_1931 as int) as test_hive_1931 + ,cast(test_hive_1934 as int) as test_hive_1934 + ,cast(test_hive_451 as string) as test_hive_451 + ,cast(test_hive_1932 as string) as test_hive_1932 + ,cast(test_hive_1936 as string) as test_hive_1936 + ,cast(test_hive_1935 as string) as test_hive_1935 + ,cast(from_unixtime(unix_timestamp(test_hive_1937,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1937 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1941 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1941 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1943 +POSTHOOK: Lineage: test_hive_1943.creation_date EXPRESSION [(test_hive_1941)test_hive_1941.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1943.ds EXPRESSION [(test_hive_1941)test_hive_1941.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1943.ds_ts SIMPLE [(test_hive_1941)test_hive_1941.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1943.source_file_name SIMPLE [(test_hive_1941)test_hive_1941.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1943.test_hive_1931 EXPRESSION [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_1931, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1943.test_hive_1932 SIMPLE [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_1932, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1943.test_hive_1933 EXPRESSION [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_1933, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1943.test_hive_1934 EXPRESSION [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_1934, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1943.test_hive_1935 SIMPLE [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_1935, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1943.test_hive_1936 SIMPLE [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_1936, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1943.test_hive_1937 EXPRESSION [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_1937, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1943.test_hive_451 SIMPLE [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_451, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1943.ts EXPRESSION [(test_hive_1941)test_hive_1941.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1942 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1942 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1942 +as +select + test_hive_1933 as test_hive_1933 + ,test_hive_1931 as test_hive_1931 + ,test_hive_1934 as test_hive_1934 + ,test_hive_451 as test_hive_451 + ,test_hive_1932 as test_hive_1932 + ,test_hive_1936 as test_hive_1936 + ,test_hive_1935 as test_hive_1935 + ,test_hive_1937 as test_hive_1937 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1943 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1941 +PREHOOK: Input: default@test_hive_1943 
+PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1942 +POSTHOOK: query: create view test_hive_1942 +as +select + test_hive_1933 as test_hive_1933 + ,test_hive_1931 as test_hive_1931 + ,test_hive_1934 as test_hive_1934 + ,test_hive_451 as test_hive_451 + ,test_hive_1932 as test_hive_1932 + ,test_hive_1936 as test_hive_1936 + ,test_hive_1935 as test_hive_1935 + ,test_hive_1937 as test_hive_1937 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1943 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1941 +POSTHOOK: Input: default@test_hive_1943 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1942 +POSTHOOK: Lineage: test_hive_1942.creation_date EXPRESSION [(test_hive_1941)test_hive_1941.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1942.ds EXPRESSION [(test_hive_1941)test_hive_1941.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1942.ds_ts SIMPLE [(test_hive_1941)test_hive_1941.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1942.source_file_name SIMPLE [(test_hive_1941)test_hive_1941.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1942.test_hive_1931 EXPRESSION [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_1931, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1942.test_hive_1932 SIMPLE [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_1932, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1942.test_hive_1933 EXPRESSION [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_1933, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1942.test_hive_1934 EXPRESSION [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_1934, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1942.test_hive_1935 SIMPLE [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_1935, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1942.test_hive_1936 SIMPLE [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_1936, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1942.test_hive_1937 EXPRESSION [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_1937, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1942.test_hive_451 SIMPLE [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_451, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1942.ts EXPRESSION [(test_hive_1941)test_hive_1941.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1939 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1939 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1939 +as +select t1.* +from test_hive_1942 t1 +inner join test_hive_1940 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1940 +PREHOOK: Input: default@test_hive_1941 +PREHOOK: Input: default@test_hive_1942 +PREHOOK: Input: default@test_hive_1943 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1939 +POSTHOOK: query: create view test_hive_1939 +as +select t1.* +from test_hive_1942 t1 +inner join test_hive_1940 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1940 +POSTHOOK: Input: default@test_hive_1941 +POSTHOOK: Input: default@test_hive_1942 +POSTHOOK: Input: default@test_hive_1943 +POSTHOOK: Output: 
database:default +POSTHOOK: Output: default@test_hive_1939 +POSTHOOK: Lineage: test_hive_1939.creation_date EXPRESSION [(test_hive_1941)test_hive_1941.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1939.ds EXPRESSION [(test_hive_1941)test_hive_1941.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1939.ds_ts SIMPLE [(test_hive_1941)test_hive_1941.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1939.source_file_name SIMPLE [(test_hive_1941)test_hive_1941.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1939.test_hive_1931 EXPRESSION [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_1931, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1939.test_hive_1932 SIMPLE [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_1932, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1939.test_hive_1933 EXPRESSION [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_1933, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1939.test_hive_1934 EXPRESSION [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_1934, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1939.test_hive_1935 SIMPLE [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_1935, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1939.test_hive_1936 SIMPLE [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_1936, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1939.test_hive_1937 EXPRESSION [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_1937, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1939.test_hive_451 SIMPLE [(test_hive_1941)test_hive_1941.FieldSchema(name:test_hive_451, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1939.ts EXPRESSION [(test_hive_1941)test_hive_1941.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1925 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1925 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1925 +( + test_hive_1919 string + ,test_hive_1916 string + ,test_hive_1920 string + ,test_hive_1918 string + ,test_hive_1917 string + ,test_hive_1923 string + ,test_hive_1922 string + ,test_hive_1921 string + ,test_hive_1924 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1925 +POSTHOOK: query: create table test_hive_1925 +( + test_hive_1919 string + ,test_hive_1916 string + ,test_hive_1920 string + ,test_hive_1918 string + ,test_hive_1917 string + ,test_hive_1923 string + ,test_hive_1922 string + ,test_hive_1921 string + ,test_hive_1924 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1925 +PREHOOK: query: create table if not exists test_hive_1928 +( + test_hive_1919 string + ,test_hive_1916 string + ,test_hive_1920 string + ,test_hive_1918 string + ,test_hive_1917 string + ,test_hive_1923 string + ,test_hive_1922 string + ,test_hive_1921 string + ,test_hive_1924 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) 
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1928
+POSTHOOK: query: create table if not exists test_hive_1928
+(
+ test_hive_1919 string
+ ,test_hive_1916 string
+ ,test_hive_1920 string
+ ,test_hive_1918 string
+ ,test_hive_1917 string
+ ,test_hive_1923 string
+ ,test_hive_1922 string
+ ,test_hive_1921 string
+ ,test_hive_1924 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1928
+PREHOOK: query: drop table if exists test_hive_1927 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1927 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1927
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1927
+POSTHOOK: query: create table if not exists test_hive_1927
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1927
+PREHOOK: query: drop view if exists test_hive_1930
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1930
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1930
+as
+select
+ cast(test_hive_1919 as int) as test_hive_1919
+ ,cast(test_hive_1916 as int) as test_hive_1916
+ ,cast(test_hive_1920 as int) as test_hive_1920
+ ,cast(test_hive_1918 as string) as test_hive_1918
+ ,cast(test_hive_1917 as string) as test_hive_1917
+ ,cast(test_hive_1923 as string) as test_hive_1923
+ ,cast(test_hive_1922 as string) as test_hive_1922
+ ,cast(test_hive_1921 as string) as test_hive_1921
+ ,cast(from_unixtime(unix_timestamp(test_hive_1924,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1924
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1928
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1928
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1930
+POSTHOOK: query: create view if not exists test_hive_1930
+as
+select
+ cast(test_hive_1919 as int) as test_hive_1919
+ ,cast(test_hive_1916 as int) as test_hive_1916
+ ,cast(test_hive_1920 as int) as test_hive_1920
+ ,cast(test_hive_1918 as string) as test_hive_1918
+ ,cast(test_hive_1917 as string) as test_hive_1917
+ ,cast(test_hive_1923 as string) as test_hive_1923
+ ,cast(test_hive_1922 as string) as test_hive_1922
+ ,cast(test_hive_1921 as string) as test_hive_1921
+ ,cast(from_unixtime(unix_timestamp(test_hive_1924,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1924
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1928
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1928
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1930
+POSTHOOK: Lineage: test_hive_1930.creation_date EXPRESSION [(test_hive_1928)test_hive_1928.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1930.ds EXPRESSION [(test_hive_1928)test_hive_1928.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1930.ds_ts SIMPLE [(test_hive_1928)test_hive_1928.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1930.source_file_name SIMPLE [(test_hive_1928)test_hive_1928.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1930.test_hive_1916 EXPRESSION [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1916, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1930.test_hive_1917 SIMPLE [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1917, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1930.test_hive_1918 SIMPLE [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1918, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1930.test_hive_1919 EXPRESSION [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1919, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1930.test_hive_1920 EXPRESSION [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1920, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1930.test_hive_1921 SIMPLE [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1921, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1930.test_hive_1922 SIMPLE [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1922, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1930.test_hive_1923 SIMPLE [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1923, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1930.test_hive_1924 EXPRESSION [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1924, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1930.ts EXPRESSION [(test_hive_1928)test_hive_1928.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1929
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1929
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1929
+as
+select
+ test_hive_1919 as test_hive_1919
+ ,test_hive_1916 as test_hive_1916
+ ,test_hive_1920 as test_hive_1920
+ ,test_hive_1918 as test_hive_1918
+ ,test_hive_1917 as test_hive_1917
+ ,test_hive_1923 as test_hive_1923
+ ,test_hive_1922 as test_hive_1922
+ ,test_hive_1921 as test_hive_1921
+ ,test_hive_1924 as test_hive_1924
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1930 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1928
+PREHOOK: Input: default@test_hive_1930
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1929
+POSTHOOK: query: create view test_hive_1929
+as
+select
+ test_hive_1919 as test_hive_1919
+ ,test_hive_1916 as test_hive_1916
+ ,test_hive_1920 as test_hive_1920
+ ,test_hive_1918 as test_hive_1918
+ ,test_hive_1917 as test_hive_1917
+ ,test_hive_1923 as test_hive_1923
+ ,test_hive_1922 as test_hive_1922
+ ,test_hive_1921 as test_hive_1921
+ ,test_hive_1924 as test_hive_1924
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1930 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1928
+POSTHOOK: Input: default@test_hive_1930
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1929
+POSTHOOK: Lineage: test_hive_1929.creation_date EXPRESSION [(test_hive_1928)test_hive_1928.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1929.ds EXPRESSION [(test_hive_1928)test_hive_1928.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1929.ds_ts SIMPLE [(test_hive_1928)test_hive_1928.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1929.source_file_name SIMPLE [(test_hive_1928)test_hive_1928.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1929.test_hive_1916 EXPRESSION [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1916, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1929.test_hive_1917 SIMPLE [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1917, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1929.test_hive_1918 SIMPLE [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1918, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1929.test_hive_1919 EXPRESSION [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1919, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1929.test_hive_1920 EXPRESSION [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1920, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1929.test_hive_1921 SIMPLE [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1921, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1929.test_hive_1922 SIMPLE [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1922, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1929.test_hive_1923 SIMPLE [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1923, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1929.test_hive_1924 EXPRESSION [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1924, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1929.ts EXPRESSION [(test_hive_1928)test_hive_1928.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1926
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1926
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1926
+as
+select t1.*
+from test_hive_1929 t1
+inner join test_hive_1927 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1927
+PREHOOK: Input: default@test_hive_1928
+PREHOOK: Input: default@test_hive_1929
+PREHOOK: Input: default@test_hive_1930
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1926
+POSTHOOK: query: create view test_hive_1926
+as
+select t1.*
+from test_hive_1929 t1
+inner join test_hive_1927 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1927
+POSTHOOK: Input: default@test_hive_1928
+POSTHOOK: Input: default@test_hive_1929
+POSTHOOK: Input: default@test_hive_1930
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1926
+POSTHOOK: Lineage: test_hive_1926.creation_date EXPRESSION [(test_hive_1928)test_hive_1928.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1926.ds EXPRESSION [(test_hive_1928)test_hive_1928.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1926.ds_ts SIMPLE [(test_hive_1928)test_hive_1928.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1926.source_file_name SIMPLE [(test_hive_1928)test_hive_1928.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1926.test_hive_1916 EXPRESSION [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1916, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1926.test_hive_1917 SIMPLE [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1917, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1926.test_hive_1918 SIMPLE [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1918, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1926.test_hive_1919 EXPRESSION [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1919, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1926.test_hive_1920 EXPRESSION [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1920, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1926.test_hive_1921 SIMPLE [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1921, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1926.test_hive_1922 SIMPLE [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1922, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1926.test_hive_1923 SIMPLE [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1923, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1926.test_hive_1924 EXPRESSION [(test_hive_1928)test_hive_1928.FieldSchema(name:test_hive_1924, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1926.ts EXPRESSION [(test_hive_1928)test_hive_1928.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1910 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1910 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1910
+(
+ test_hive_1905 string
+ ,test_hive_1903 string
+ ,test_hive_1906 string
+ ,test_hive_450 string
+ ,test_hive_1904 string
+ ,test_hive_1908 string
+ ,test_hive_1907 string
+ ,test_hive_1909 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1910
+POSTHOOK: query: create table test_hive_1910
+(
+ test_hive_1905 string
+ ,test_hive_1903 string
+ ,test_hive_1906 string
+ ,test_hive_450 string
+ ,test_hive_1904 string
+ ,test_hive_1908 string
+ ,test_hive_1907 string
+ ,test_hive_1909 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1910
+PREHOOK: query: create table if not exists test_hive_1913
+(
+ test_hive_1905 string
+ ,test_hive_1903 string
+ ,test_hive_1906 string
+ ,test_hive_450 string
+ ,test_hive_1904 string
+ ,test_hive_1908 string
+ ,test_hive_1907 string
+ ,test_hive_1909 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1913
+POSTHOOK: query: create table if not exists test_hive_1913
+(
+ test_hive_1905 string
+ ,test_hive_1903 string
+ ,test_hive_1906 string
+ ,test_hive_450 string
+ ,test_hive_1904 string
+ ,test_hive_1908 string
+ ,test_hive_1907 string
+ ,test_hive_1909 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1913
+PREHOOK: query: drop table if exists test_hive_1912 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1912 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1912
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1912
+POSTHOOK: query: create table if not exists test_hive_1912
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1912
+PREHOOK: query: drop view if exists test_hive_1915
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1915
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1915
+as
+select
+ cast(test_hive_1905 as int) as test_hive_1905
+ ,cast(test_hive_1903 as int) as test_hive_1903
+ ,cast(test_hive_1906 as int) as test_hive_1906
+ ,cast(test_hive_450 as string) as test_hive_450
+ ,cast(test_hive_1904 as string) as test_hive_1904
+ ,cast(test_hive_1908 as string) as test_hive_1908
+ ,cast(test_hive_1907 as string) as test_hive_1907
+ ,cast(from_unixtime(unix_timestamp(test_hive_1909,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1909
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1913
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1913
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1915
+POSTHOOK: query: create view if not exists test_hive_1915
+as
+select
+ cast(test_hive_1905 as int) as test_hive_1905
+ ,cast(test_hive_1903 as int) as test_hive_1903
+ ,cast(test_hive_1906 as int) as test_hive_1906
+ ,cast(test_hive_450 as string) as test_hive_450
+ ,cast(test_hive_1904 as string) as test_hive_1904
+ ,cast(test_hive_1908 as string) as test_hive_1908
+ ,cast(test_hive_1907 as string) as test_hive_1907
+ ,cast(from_unixtime(unix_timestamp(test_hive_1909,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1909
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1913
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1913
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1915
+POSTHOOK: Lineage: test_hive_1915.creation_date EXPRESSION [(test_hive_1913)test_hive_1913.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1915.ds EXPRESSION [(test_hive_1913)test_hive_1913.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1915.ds_ts SIMPLE [(test_hive_1913)test_hive_1913.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1915.source_file_name SIMPLE [(test_hive_1913)test_hive_1913.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1915.test_hive_1903 EXPRESSION [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_1903, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1915.test_hive_1904 SIMPLE [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_1904, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1915.test_hive_1905 EXPRESSION [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_1905, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1915.test_hive_1906 EXPRESSION [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_1906, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1915.test_hive_1907 SIMPLE [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_1907, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1915.test_hive_1908 SIMPLE [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_1908, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1915.test_hive_1909 EXPRESSION [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_1909, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1915.test_hive_450 SIMPLE [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_450, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1915.ts EXPRESSION [(test_hive_1913)test_hive_1913.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1914
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1914
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1914
+as
+select
+ test_hive_1905 as test_hive_1905
+ ,test_hive_1903 as test_hive_1903
+ ,test_hive_1906 as test_hive_1906
+ ,test_hive_450 as test_hive_450
+ ,test_hive_1904 as test_hive_1904
+ ,test_hive_1908 as test_hive_1908
+ ,test_hive_1907 as test_hive_1907
+ ,test_hive_1909 as test_hive_1909
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1915 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1913
+PREHOOK: Input: default@test_hive_1915
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1914
+POSTHOOK: query: create view test_hive_1914
+as
+select
+ test_hive_1905 as test_hive_1905
+ ,test_hive_1903 as test_hive_1903
+ ,test_hive_1906 as test_hive_1906
+ ,test_hive_450 as test_hive_450
+ ,test_hive_1904 as test_hive_1904
+ ,test_hive_1908 as test_hive_1908
+ ,test_hive_1907 as test_hive_1907
+ ,test_hive_1909 as test_hive_1909
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1915 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1913
+POSTHOOK: Input: default@test_hive_1915
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1914
+POSTHOOK: Lineage: test_hive_1914.creation_date EXPRESSION [(test_hive_1913)test_hive_1913.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1914.ds EXPRESSION [(test_hive_1913)test_hive_1913.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1914.ds_ts SIMPLE [(test_hive_1913)test_hive_1913.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1914.source_file_name SIMPLE [(test_hive_1913)test_hive_1913.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1914.test_hive_1903 EXPRESSION [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_1903, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1914.test_hive_1904 SIMPLE [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_1904, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1914.test_hive_1905 EXPRESSION [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_1905, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1914.test_hive_1906 EXPRESSION [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_1906, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1914.test_hive_1907 SIMPLE [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_1907, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1914.test_hive_1908 SIMPLE [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_1908, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1914.test_hive_1909 EXPRESSION [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_1909, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1914.test_hive_450 SIMPLE [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_450, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1914.ts EXPRESSION [(test_hive_1913)test_hive_1913.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1911
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1911
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1911
+as
+select t1.*
+from test_hive_1914 t1
+inner join test_hive_1912 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1912
+PREHOOK: Input: default@test_hive_1913
+PREHOOK: Input: default@test_hive_1914
+PREHOOK: Input: default@test_hive_1915
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1911
+POSTHOOK: query: create view test_hive_1911
+as
+select t1.*
+from test_hive_1914 t1
+inner join test_hive_1912 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1912
+POSTHOOK: Input: default@test_hive_1913
+POSTHOOK: Input: default@test_hive_1914
+POSTHOOK: Input: default@test_hive_1915
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1911
+POSTHOOK: Lineage: test_hive_1911.creation_date EXPRESSION [(test_hive_1913)test_hive_1913.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1911.ds EXPRESSION [(test_hive_1913)test_hive_1913.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1911.ds_ts SIMPLE [(test_hive_1913)test_hive_1913.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1911.source_file_name SIMPLE [(test_hive_1913)test_hive_1913.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1911.test_hive_1903 EXPRESSION [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_1903, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1911.test_hive_1904 SIMPLE [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_1904, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1911.test_hive_1905 EXPRESSION [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_1905, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1911.test_hive_1906 EXPRESSION [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_1906, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1911.test_hive_1907 SIMPLE [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_1907, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1911.test_hive_1908 SIMPLE [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_1908, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1911.test_hive_1909 EXPRESSION [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_1909, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1911.test_hive_450 SIMPLE [(test_hive_1913)test_hive_1913.FieldSchema(name:test_hive_450, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1911.ts EXPRESSION [(test_hive_1913)test_hive_1913.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1897 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1897 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1897
+(
+ test_hive_1893 string
+ ,test_hive_1891 string
+ ,test_hive_1894 string
+ ,test_hive_449 string
+ ,test_hive_1892 string
+ ,test_hive_1895 string
+ ,test_hive_1896 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1897
+POSTHOOK: query: create table test_hive_1897
+(
+ test_hive_1893 string
+ ,test_hive_1891 string
+ ,test_hive_1894 string
+ ,test_hive_449 string
+ ,test_hive_1892 string
+ ,test_hive_1895 string
+ ,test_hive_1896 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1897
+PREHOOK: query: create table if not exists test_hive_1900
+(
+ test_hive_1893 string
+ ,test_hive_1891 string
+ ,test_hive_1894 string
+ ,test_hive_449 string
+ ,test_hive_1892 string
+ ,test_hive_1895 string
+ ,test_hive_1896 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1900
+POSTHOOK: query: create table if not exists test_hive_1900
+(
+ test_hive_1893 string
+ ,test_hive_1891 string
+ ,test_hive_1894 string
+ ,test_hive_449 string
+ ,test_hive_1892 string
+ ,test_hive_1895 string
+ ,test_hive_1896 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1900
+PREHOOK: query: drop table if exists test_hive_1899 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1899 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1899
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1899
+POSTHOOK: query: create table if not exists test_hive_1899
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1899
+PREHOOK: query: drop view if exists test_hive_1902
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1902
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1902
+as
+select
+ cast(test_hive_1893 as int) as test_hive_1893
+ ,cast(test_hive_1891 as int) as test_hive_1891
+ ,cast(test_hive_1894 as int) as test_hive_1894
+ ,cast(test_hive_449 as string) as test_hive_449
+ ,cast(test_hive_1892 as string) as test_hive_1892
+ ,cast(test_hive_1895 as string) as test_hive_1895
+ ,cast(from_unixtime(unix_timestamp(test_hive_1896,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1896
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1900
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1900
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1902
+POSTHOOK: query: create view if not exists test_hive_1902
+as
+select
+ cast(test_hive_1893 as int) as test_hive_1893
+ ,cast(test_hive_1891 as int) as test_hive_1891
+ ,cast(test_hive_1894 as int) as test_hive_1894
+ ,cast(test_hive_449 as string) as test_hive_449
+ ,cast(test_hive_1892 as string) as test_hive_1892
+ ,cast(test_hive_1895 as string) as test_hive_1895
+ ,cast(from_unixtime(unix_timestamp(test_hive_1896,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1896
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1900
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1900
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1902
+POSTHOOK: Lineage: test_hive_1902.creation_date EXPRESSION [(test_hive_1900)test_hive_1900.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1902.ds EXPRESSION [(test_hive_1900)test_hive_1900.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1902.ds_ts SIMPLE [(test_hive_1900)test_hive_1900.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1902.source_file_name SIMPLE [(test_hive_1900)test_hive_1900.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1902.test_hive_1891 EXPRESSION [(test_hive_1900)test_hive_1900.FieldSchema(name:test_hive_1891, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1902.test_hive_1892 SIMPLE [(test_hive_1900)test_hive_1900.FieldSchema(name:test_hive_1892, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1902.test_hive_1893 EXPRESSION [(test_hive_1900)test_hive_1900.FieldSchema(name:test_hive_1893, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1902.test_hive_1894 EXPRESSION [(test_hive_1900)test_hive_1900.FieldSchema(name:test_hive_1894, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1902.test_hive_1895 SIMPLE [(test_hive_1900)test_hive_1900.FieldSchema(name:test_hive_1895, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1902.test_hive_1896 EXPRESSION [(test_hive_1900)test_hive_1900.FieldSchema(name:test_hive_1896, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1902.test_hive_449 SIMPLE [(test_hive_1900)test_hive_1900.FieldSchema(name:test_hive_449, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1902.ts EXPRESSION [(test_hive_1900)test_hive_1900.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1901
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1901
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1901
+as
+select
+ test_hive_1893 as test_hive_1893
+ ,test_hive_1891 as test_hive_1891
+ ,test_hive_1894 as test_hive_1894
+ ,test_hive_449 as test_hive_449
+ ,test_hive_1892 as test_hive_1892
+ ,test_hive_1895 as test_hive_1895
+ ,test_hive_1896 as test_hive_1896
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1902 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1900
+PREHOOK: Input: default@test_hive_1902
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1901
+POSTHOOK: query: create view test_hive_1901
+as
+select
+ test_hive_1893 as test_hive_1893
+ ,test_hive_1891 as test_hive_1891
+ ,test_hive_1894 as test_hive_1894
+ ,test_hive_449 as test_hive_449
+ ,test_hive_1892 as test_hive_1892
+ ,test_hive_1895 as test_hive_1895
+ ,test_hive_1896 as test_hive_1896
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1902 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1900
+POSTHOOK: Input: default@test_hive_1902
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1901
+POSTHOOK: Lineage: test_hive_1901.creation_date EXPRESSION [(test_hive_1900)test_hive_1900.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1901.ds EXPRESSION [(test_hive_1900)test_hive_1900.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1901.ds_ts SIMPLE [(test_hive_1900)test_hive_1900.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1901.source_file_name SIMPLE [(test_hive_1900)test_hive_1900.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1901.test_hive_1891 EXPRESSION [(test_hive_1900)test_hive_1900.FieldSchema(name:test_hive_1891, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1901.test_hive_1892 SIMPLE [(test_hive_1900)test_hive_1900.FieldSchema(name:test_hive_1892, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1901.test_hive_1893 EXPRESSION [(test_hive_1900)test_hive_1900.FieldSchema(name:test_hive_1893, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1901.test_hive_1894 EXPRESSION [(test_hive_1900)test_hive_1900.FieldSchema(name:test_hive_1894, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1901.test_hive_1895 SIMPLE [(test_hive_1900)test_hive_1900.FieldSchema(name:test_hive_1895, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1901.test_hive_1896 EXPRESSION [(test_hive_1900)test_hive_1900.FieldSchema(name:test_hive_1896, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1901.test_hive_449 SIMPLE [(test_hive_1900)test_hive_1900.FieldSchema(name:test_hive_449, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1901.ts EXPRESSION [(test_hive_1900)test_hive_1900.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1898
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1898
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1898
+as
+select t1.*
+from test_hive_1901 t1
+inner join test_hive_1899 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1899
+PREHOOK: Input: default@test_hive_1900
+PREHOOK: Input: default@test_hive_1901
+PREHOOK: Input: default@test_hive_1902
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1898
+POSTHOOK: query: create view test_hive_1898
+as
+select t1.*
+from test_hive_1901 t1
+inner join test_hive_1899 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1899
+POSTHOOK: Input: default@test_hive_1900
+POSTHOOK: Input: default@test_hive_1901
+POSTHOOK: Input: default@test_hive_1902
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1898
+POSTHOOK: Lineage: test_hive_1898.creation_date EXPRESSION [(test_hive_1900)test_hive_1900.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1898.ds EXPRESSION [(test_hive_1900)test_hive_1900.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1898.ds_ts SIMPLE [(test_hive_1900)test_hive_1900.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1898.source_file_name SIMPLE [(test_hive_1900)test_hive_1900.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1898.test_hive_1891 EXPRESSION [(test_hive_1900)test_hive_1900.FieldSchema(name:test_hive_1891, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1898.test_hive_1892 SIMPLE [(test_hive_1900)test_hive_1900.FieldSchema(name:test_hive_1892, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1898.test_hive_1893 EXPRESSION [(test_hive_1900)test_hive_1900.FieldSchema(name:test_hive_1893, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1898.test_hive_1894 EXPRESSION [(test_hive_1900)test_hive_1900.FieldSchema(name:test_hive_1894, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1898.test_hive_1895 SIMPLE [(test_hive_1900)test_hive_1900.FieldSchema(name:test_hive_1895, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1898.test_hive_1896 EXPRESSION [(test_hive_1900)test_hive_1900.FieldSchema(name:test_hive_1896, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1898.test_hive_449 SIMPLE [(test_hive_1900)test_hive_1900.FieldSchema(name:test_hive_449, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1898.ts EXPRESSION [(test_hive_1900)test_hive_1900.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1885 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1885 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1885
+(
+ test_hive_1881 string
+ ,test_hive_1879 string
+ ,test_hive_1882 string
+ ,test_hive_448 string
+ ,test_hive_1880 string
+ ,test_hive_1883 string
+ ,test_hive_1884 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1885
+POSTHOOK: query: create table test_hive_1885
+(
+ test_hive_1881 string
+ ,test_hive_1879 string
+ ,test_hive_1882 string
+ ,test_hive_448 string
+ ,test_hive_1880 string
+ ,test_hive_1883 string
+ ,test_hive_1884 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1885
+PREHOOK: query: create table if not exists test_hive_1888
+(
+ test_hive_1881 string
+ ,test_hive_1879 string
+ ,test_hive_1882 string
+ ,test_hive_448 string
+ ,test_hive_1880 string
+ ,test_hive_1883 string
+ ,test_hive_1884 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1888
+POSTHOOK: query: create table if not exists test_hive_1888
+(
+ test_hive_1881 string
+ ,test_hive_1879 string
+ ,test_hive_1882 string
+ ,test_hive_448 string
+ ,test_hive_1880 string
+ ,test_hive_1883 string
+ ,test_hive_1884 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1888
+PREHOOK: query: drop table if exists test_hive_1887 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1887 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1887
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1887
+POSTHOOK: query: create table if not exists test_hive_1887
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1887
+PREHOOK: query: drop view if exists test_hive_1890
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1890
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1890
+as
+select
+ cast(test_hive_1881 as int) as test_hive_1881
+ ,cast(test_hive_1879 as int) as test_hive_1879
+ ,cast(test_hive_1882 as int) as test_hive_1882
+ ,cast(test_hive_448 as string) as test_hive_448
+ ,cast(test_hive_1880 as string) as test_hive_1880
+ ,cast(test_hive_1883 as string) as test_hive_1883
+ ,cast(from_unixtime(unix_timestamp(test_hive_1884,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1884
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1888
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1888
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1890
+POSTHOOK: query: create view if not exists test_hive_1890
+as
+select
+ cast(test_hive_1881 as int) as test_hive_1881
+ ,cast(test_hive_1879 as int) as test_hive_1879
+ ,cast(test_hive_1882 as int) as test_hive_1882
+ ,cast(test_hive_448 as string) as test_hive_448
+ ,cast(test_hive_1880 as string) as test_hive_1880
+ ,cast(test_hive_1883 as string) as test_hive_1883
+ ,cast(from_unixtime(unix_timestamp(test_hive_1884,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1884
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1888
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1888
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1890
+POSTHOOK: Lineage: test_hive_1890.creation_date EXPRESSION [(test_hive_1888)test_hive_1888.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1890.ds EXPRESSION [(test_hive_1888)test_hive_1888.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1890.ds_ts SIMPLE [(test_hive_1888)test_hive_1888.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1890.source_file_name SIMPLE [(test_hive_1888)test_hive_1888.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1890.test_hive_1879 EXPRESSION [(test_hive_1888)test_hive_1888.FieldSchema(name:test_hive_1879, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1890.test_hive_1880 SIMPLE [(test_hive_1888)test_hive_1888.FieldSchema(name:test_hive_1880, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1890.test_hive_1881 EXPRESSION [(test_hive_1888)test_hive_1888.FieldSchema(name:test_hive_1881, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1890.test_hive_1882 EXPRESSION [(test_hive_1888)test_hive_1888.FieldSchema(name:test_hive_1882, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1890.test_hive_1883 SIMPLE [(test_hive_1888)test_hive_1888.FieldSchema(name:test_hive_1883, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1890.test_hive_1884 EXPRESSION [(test_hive_1888)test_hive_1888.FieldSchema(name:test_hive_1884, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1890.test_hive_448 SIMPLE [(test_hive_1888)test_hive_1888.FieldSchema(name:test_hive_448, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1890.ts EXPRESSION [(test_hive_1888)test_hive_1888.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1889
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1889
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1889
+as
+select
+ test_hive_1881 as test_hive_1881
+ ,test_hive_1879 as test_hive_1879
+ ,test_hive_1882 as test_hive_1882
+ ,test_hive_448 as test_hive_448
+ ,test_hive_1880 as test_hive_1880
+ ,test_hive_1883 as test_hive_1883
+ ,test_hive_1884 as test_hive_1884
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1890 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1888
+PREHOOK: Input: default@test_hive_1890
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1889
+POSTHOOK: query: create view test_hive_1889
+as
+select
+ test_hive_1881 as test_hive_1881
+ ,test_hive_1879 as test_hive_1879
+ ,test_hive_1882 as test_hive_1882
+ ,test_hive_448 as test_hive_448
+ ,test_hive_1880 as test_hive_1880
+ ,test_hive_1883 as test_hive_1883
+ ,test_hive_1884 as test_hive_1884
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1890 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1888
+POSTHOOK: Input: default@test_hive_1890
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1889
+POSTHOOK: Lineage: test_hive_1889.creation_date EXPRESSION [(test_hive_1888)test_hive_1888.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1889.ds EXPRESSION [(test_hive_1888)test_hive_1888.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1889.ds_ts SIMPLE [(test_hive_1888)test_hive_1888.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1889.source_file_name SIMPLE [(test_hive_1888)test_hive_1888.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1889.test_hive_1879 EXPRESSION [(test_hive_1888)test_hive_1888.FieldSchema(name:test_hive_1879, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1889.test_hive_1880 SIMPLE [(test_hive_1888)test_hive_1888.FieldSchema(name:test_hive_1880, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1889.test_hive_1881 EXPRESSION [(test_hive_1888)test_hive_1888.FieldSchema(name:test_hive_1881, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1889.test_hive_1882 EXPRESSION [(test_hive_1888)test_hive_1888.FieldSchema(name:test_hive_1882, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1889.test_hive_1883 SIMPLE [(test_hive_1888)test_hive_1888.FieldSchema(name:test_hive_1883, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1889.test_hive_1884 EXPRESSION [(test_hive_1888)test_hive_1888.FieldSchema(name:test_hive_1884, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1889.test_hive_448 SIMPLE [(test_hive_1888)test_hive_1888.FieldSchema(name:test_hive_448, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1889.ts EXPRESSION [(test_hive_1888)test_hive_1888.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1886
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1886
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1886
+as
+select t1.*
+from test_hive_1889 t1
+inner join test_hive_1887 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1887
+PREHOOK: Input: default@test_hive_1888
+PREHOOK: Input: default@test_hive_1889
+PREHOOK: Input: default@test_hive_1890
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1886
+POSTHOOK: query: create view test_hive_1886
+as
+select t1.*
+from test_hive_1889 t1
+inner join test_hive_1887 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1887
+POSTHOOK: Input: default@test_hive_1888
+POSTHOOK: Input: default@test_hive_1889
+POSTHOOK: Input: default@test_hive_1890
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1886
+POSTHOOK: Lineage: test_hive_1886.creation_date EXPRESSION [(test_hive_1888)test_hive_1888.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1886.ds EXPRESSION [(test_hive_1888)test_hive_1888.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1886.ds_ts SIMPLE [(test_hive_1888)test_hive_1888.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1886.source_file_name SIMPLE [(test_hive_1888)test_hive_1888.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1886.test_hive_1879 EXPRESSION [(test_hive_1888)test_hive_1888.FieldSchema(name:test_hive_1879, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1886.test_hive_1880 SIMPLE [(test_hive_1888)test_hive_1888.FieldSchema(name:test_hive_1880, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1886.test_hive_1881 EXPRESSION [(test_hive_1888)test_hive_1888.FieldSchema(name:test_hive_1881, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1886.test_hive_1882 EXPRESSION [(test_hive_1888)test_hive_1888.FieldSchema(name:test_hive_1882, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1886.test_hive_1883 SIMPLE [(test_hive_1888)test_hive_1888.FieldSchema(name:test_hive_1883, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1886.test_hive_1884 EXPRESSION [(test_hive_1888)test_hive_1888.FieldSchema(name:test_hive_1884, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1886.test_hive_448 SIMPLE [(test_hive_1888)test_hive_1888.FieldSchema(name:test_hive_448, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1886.ts EXPRESSION [(test_hive_1888)test_hive_1888.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1837 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1837 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1837
+(
+ test_hive_1832 string
+ ,test_hive_1830 string
+ ,test_hive_1833 string
+ ,test_hive_444 string
+ ,test_hive_1831 string
+ ,test_hive_1835 string
+ ,test_hive_1834 string
+ ,test_hive_1836 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1837
+POSTHOOK: query: create table test_hive_1837
+(
+ test_hive_1832 string
+ ,test_hive_1830 string
+ ,test_hive_1833 string
+ ,test_hive_444 string
+ ,test_hive_1831 string
+ ,test_hive_1835 string
+ ,test_hive_1834 string
+ ,test_hive_1836 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1837
+PREHOOK: query: create table if not exists test_hive_1840
+(
+ test_hive_1832 string
+ ,test_hive_1830 string
+ ,test_hive_1833 string
+ ,test_hive_444 string
+ ,test_hive_1831 string
+ ,test_hive_1835 string
+ ,test_hive_1834 string
+ ,test_hive_1836 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1840
+POSTHOOK: query: create table if not exists test_hive_1840
+(
+ test_hive_1832 string
+ ,test_hive_1830 string
+ ,test_hive_1833 string
+ ,test_hive_444 string
+ ,test_hive_1831 string
+ ,test_hive_1835 string
+ ,test_hive_1834 string
+ ,test_hive_1836 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1840
+PREHOOK: query: drop table if exists test_hive_1839 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1839 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1839
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1839
+POSTHOOK: query: create table if not exists test_hive_1839
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1839
+PREHOOK: query: drop view if exists test_hive_1842
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1842
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1842
+as
+select
+ cast(test_hive_1832 as int) as test_hive_1832
+ ,cast(test_hive_1830 as int) as test_hive_1830
+ ,cast(test_hive_1833 as int) as test_hive_1833
+ ,cast(test_hive_444 as string) as test_hive_444
+ ,cast(test_hive_1831 as string) as test_hive_1831
+ ,cast(test_hive_1835 as string) as test_hive_1835
+ ,cast(test_hive_1834 as string) as test_hive_1834
+ ,cast(from_unixtime(unix_timestamp(test_hive_1836,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1836
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1840
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1840
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1842
+POSTHOOK: query: create view if not exists test_hive_1842
+as
+select
+ cast(test_hive_1832 as int) as test_hive_1832
+ ,cast(test_hive_1830 as int) as test_hive_1830
+ ,cast(test_hive_1833 as int) as test_hive_1833
+ ,cast(test_hive_444 as string) as test_hive_444
+ ,cast(test_hive_1831 as string) as test_hive_1831
+ ,cast(test_hive_1835 as string) as test_hive_1835
+ ,cast(test_hive_1834 as string) as test_hive_1834
+ ,cast(from_unixtime(unix_timestamp(test_hive_1836,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1836
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1840
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1840
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1842
+POSTHOOK: Lineage: test_hive_1842.creation_date EXPRESSION [(test_hive_1840)test_hive_1840.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1842.ds EXPRESSION [(test_hive_1840)test_hive_1840.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1842.ds_ts SIMPLE [(test_hive_1840)test_hive_1840.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1842.source_file_name SIMPLE [(test_hive_1840)test_hive_1840.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1842.test_hive_1830 EXPRESSION [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_1830, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1842.test_hive_1831 SIMPLE [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_1831, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1842.test_hive_1832 EXPRESSION [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_1832, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1842.test_hive_1833 EXPRESSION [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_1833, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1842.test_hive_1834 SIMPLE [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_1834, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1842.test_hive_1835 SIMPLE [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_1835, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1842.test_hive_1836 EXPRESSION [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_1836, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1842.test_hive_444 SIMPLE [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_444, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1842.ts EXPRESSION [(test_hive_1840)test_hive_1840.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1841
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1841
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1841
+as
+select
+ test_hive_1832 as test_hive_1832
+ ,test_hive_1830 as test_hive_1830
+ ,test_hive_1833 as test_hive_1833
+ ,test_hive_444 as test_hive_444
+ ,test_hive_1831 as test_hive_1831
+ ,test_hive_1835 as test_hive_1835
+ ,test_hive_1834 as test_hive_1834
+ ,test_hive_1836 as test_hive_1836
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1842 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1840
+PREHOOK: Input: default@test_hive_1842
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1841
+POSTHOOK: query: create view test_hive_1841
+as
+select
+ test_hive_1832 as test_hive_1832
+ ,test_hive_1830 as test_hive_1830
+ ,test_hive_1833 as test_hive_1833
+ ,test_hive_444 as test_hive_444
+ ,test_hive_1831 as test_hive_1831
+ ,test_hive_1835 as test_hive_1835
+ ,test_hive_1834 as test_hive_1834
+ ,test_hive_1836 as test_hive_1836
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1842 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1840
+POSTHOOK: Input: default@test_hive_1842
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1841
+POSTHOOK: Lineage: test_hive_1841.creation_date EXPRESSION [(test_hive_1840)test_hive_1840.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1841.ds EXPRESSION [(test_hive_1840)test_hive_1840.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1841.ds_ts SIMPLE [(test_hive_1840)test_hive_1840.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1841.source_file_name SIMPLE [(test_hive_1840)test_hive_1840.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1841.test_hive_1830 EXPRESSION [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_1830, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1841.test_hive_1831 SIMPLE [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_1831, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1841.test_hive_1832 EXPRESSION [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_1832, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1841.test_hive_1833 EXPRESSION [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_1833, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1841.test_hive_1834 SIMPLE [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_1834, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1841.test_hive_1835 SIMPLE [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_1835, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1841.test_hive_1836 EXPRESSION [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_1836, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1841.test_hive_444 SIMPLE [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_444, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1841.ts EXPRESSION [(test_hive_1840)test_hive_1840.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1838
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1838
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1838
+as
+select t1.*
+from test_hive_1841 t1
+inner join test_hive_1839 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1839
+PREHOOK: Input: default@test_hive_1840
+PREHOOK: Input: default@test_hive_1841
+PREHOOK: Input: default@test_hive_1842
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1838
+POSTHOOK: query: create view test_hive_1838
+as
+select t1.*
+from test_hive_1841 t1
+inner join test_hive_1839 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1839
+POSTHOOK: Input: default@test_hive_1840
+POSTHOOK: Input: default@test_hive_1841
+POSTHOOK: Input: default@test_hive_1842
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1838
+POSTHOOK: Lineage: test_hive_1838.creation_date EXPRESSION [(test_hive_1840)test_hive_1840.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1838.ds EXPRESSION [(test_hive_1840)test_hive_1840.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1838.ds_ts SIMPLE [(test_hive_1840)test_hive_1840.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1838.source_file_name SIMPLE [(test_hive_1840)test_hive_1840.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1838.test_hive_1830 EXPRESSION [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_1830, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1838.test_hive_1831 SIMPLE [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_1831, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1838.test_hive_1832 EXPRESSION [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_1832, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1838.test_hive_1833 EXPRESSION [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_1833, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1838.test_hive_1834 SIMPLE [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_1834, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1838.test_hive_1835 SIMPLE [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_1835, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1838.test_hive_1836 EXPRESSION [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_1836, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1838.test_hive_444 SIMPLE [(test_hive_1840)test_hive_1840.FieldSchema(name:test_hive_444, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1838.ts EXPRESSION [(test_hive_1840)test_hive_1840.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1824 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1824 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1824
+(
+ test_hive_1818 string
+ ,test_hive_1816 string
+ ,test_hive_1819 string
+ ,test_hive_443 string
+ ,test_hive_1817 string
+ ,test_hive_1822 string
+ ,test_hive_1821 string
+ ,test_hive_1820 string
+ ,test_hive_1823 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1824
+POSTHOOK: query: create table test_hive_1824
+(
+ test_hive_1818 string
+ ,test_hive_1816 string
+ ,test_hive_1819 string
+ ,test_hive_443 string
+ ,test_hive_1817 string
+ ,test_hive_1822 string
+ ,test_hive_1821 string
+ ,test_hive_1820 string
+ ,test_hive_1823 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1824
+PREHOOK: query: create table if not exists test_hive_1827
+(
+ test_hive_1818 string
+ ,test_hive_1816 string
+ ,test_hive_1819 string
+ ,test_hive_443 string
+ ,test_hive_1817 string
+ ,test_hive_1822 string
+ ,test_hive_1821 string
+ ,test_hive_1820 string
+ ,test_hive_1823 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1827
+POSTHOOK: query: create table if not exists test_hive_1827
+(
+ test_hive_1818 string
+ ,test_hive_1816 string
+ ,test_hive_1819 string
+ ,test_hive_443 string
+ ,test_hive_1817 string
+ ,test_hive_1822 string
+ ,test_hive_1821 string
+ ,test_hive_1820 string
+ ,test_hive_1823 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1827
+PREHOOK: query: drop table if exists test_hive_1826 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1826 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1826
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1826
+POSTHOOK: query: create table if not exists test_hive_1826
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1826
+PREHOOK: query: drop view if exists test_hive_1829
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1829
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1829
+as
+select
+ cast(test_hive_1818 as int) as test_hive_1818
+ ,cast(test_hive_1816 as int) as test_hive_1816
+ ,cast(test_hive_1819 as int) as test_hive_1819
+ ,cast(test_hive_443 as string) as test_hive_443
+ ,cast(test_hive_1817 as string) as test_hive_1817
+ ,cast(test_hive_1822 as string) as test_hive_1822
+ ,cast(test_hive_1821 as string) as test_hive_1821
+ ,cast(test_hive_1820 as string) as test_hive_1820
+ ,cast(from_unixtime(unix_timestamp(test_hive_1823,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1823
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1827
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1827
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1829
+POSTHOOK: query: create view if not exists test_hive_1829
+as
+select
+ cast(test_hive_1818 as int) as test_hive_1818
+ ,cast(test_hive_1816 as int) as test_hive_1816
+ ,cast(test_hive_1819 as int) as test_hive_1819
+ ,cast(test_hive_443 as string) as test_hive_443
+ ,cast(test_hive_1817 as string) as test_hive_1817
+ ,cast(test_hive_1822 as string) as test_hive_1822
+ ,cast(test_hive_1821 as string) as test_hive_1821
+ ,cast(test_hive_1820 as string) as test_hive_1820
+ ,cast(from_unixtime(unix_timestamp(test_hive_1823,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1823
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1827
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1827
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1829
+POSTHOOK: Lineage: test_hive_1829.creation_date EXPRESSION [(test_hive_1827)test_hive_1827.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1829.ds EXPRESSION [(test_hive_1827)test_hive_1827.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1829.ds_ts SIMPLE [(test_hive_1827)test_hive_1827.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1829.source_file_name SIMPLE [(test_hive_1827)test_hive_1827.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1829.test_hive_1816 EXPRESSION [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1816, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1829.test_hive_1817 SIMPLE [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1817, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1829.test_hive_1818 EXPRESSION [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1818, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1829.test_hive_1819 EXPRESSION [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1819, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1829.test_hive_1820 SIMPLE [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1820, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1829.test_hive_1821 SIMPLE [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1821, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1829.test_hive_1822 SIMPLE [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1822, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1829.test_hive_1823 EXPRESSION [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1823, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1829.test_hive_443 SIMPLE [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_443, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1829.ts EXPRESSION [(test_hive_1827)test_hive_1827.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1828
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1828
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1828
+as
+select
+ test_hive_1818 as test_hive_1818
+ ,test_hive_1816 as test_hive_1816
+ ,test_hive_1819 as test_hive_1819
+ ,test_hive_443 as test_hive_443
+ ,test_hive_1817 as test_hive_1817
+ ,test_hive_1822 as test_hive_1822
+ ,test_hive_1821 as test_hive_1821
+ ,test_hive_1820 as test_hive_1820
+ ,test_hive_1823 as test_hive_1823
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1829 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1827
+PREHOOK: Input: default@test_hive_1829
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1828
+POSTHOOK: query: create view test_hive_1828
+as
+select
+ test_hive_1818 as test_hive_1818
+ ,test_hive_1816 as test_hive_1816
+ ,test_hive_1819 as test_hive_1819
+ ,test_hive_443 as test_hive_443
+ ,test_hive_1817 as test_hive_1817
+ ,test_hive_1822 as test_hive_1822
+ ,test_hive_1821 as test_hive_1821
+ ,test_hive_1820 as test_hive_1820
+ ,test_hive_1823 as test_hive_1823
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1829 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1827
+POSTHOOK: Input: default@test_hive_1829
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1828
+POSTHOOK: Lineage: test_hive_1828.creation_date EXPRESSION [(test_hive_1827)test_hive_1827.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1828.ds EXPRESSION [(test_hive_1827)test_hive_1827.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1828.ds_ts SIMPLE [(test_hive_1827)test_hive_1827.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1828.source_file_name SIMPLE [(test_hive_1827)test_hive_1827.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1828.test_hive_1816 EXPRESSION [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1816, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1828.test_hive_1817 SIMPLE [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1817, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1828.test_hive_1818 EXPRESSION [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1818, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1828.test_hive_1819 EXPRESSION [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1819, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1828.test_hive_1820 SIMPLE [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1820, type:string, comment:null), ]
+POSTHOOK: Lineage: 
test_hive_1828.test_hive_1821 SIMPLE [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1821, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1828.test_hive_1822 SIMPLE [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1822, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1828.test_hive_1823 EXPRESSION [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1823, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1828.test_hive_443 SIMPLE [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_443, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1828.ts EXPRESSION [(test_hive_1827)test_hive_1827.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1825 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1825 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1825 +as +select t1.* +from test_hive_1828 t1 +inner join test_hive_1826 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1826 +PREHOOK: Input: default@test_hive_1827 +PREHOOK: Input: default@test_hive_1828 +PREHOOK: Input: default@test_hive_1829 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1825 +POSTHOOK: query: create view test_hive_1825 +as +select t1.* +from test_hive_1828 t1 +inner join test_hive_1826 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1826 +POSTHOOK: Input: default@test_hive_1827 +POSTHOOK: Input: default@test_hive_1828 +POSTHOOK: Input: default@test_hive_1829 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1825 +POSTHOOK: Lineage: test_hive_1825.creation_date EXPRESSION [(test_hive_1827)test_hive_1827.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1825.ds EXPRESSION [(test_hive_1827)test_hive_1827.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1825.ds_ts SIMPLE [(test_hive_1827)test_hive_1827.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1825.source_file_name SIMPLE [(test_hive_1827)test_hive_1827.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1825.test_hive_1816 EXPRESSION [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1816, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1825.test_hive_1817 SIMPLE [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1817, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1825.test_hive_1818 EXPRESSION [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1818, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1825.test_hive_1819 EXPRESSION [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1819, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1825.test_hive_1820 SIMPLE [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1820, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1825.test_hive_1821 SIMPLE [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1821, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1825.test_hive_1822 SIMPLE [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1822, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1825.test_hive_1823 EXPRESSION [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_1823, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1825.test_hive_443 SIMPLE [(test_hive_1827)test_hive_1827.FieldSchema(name:test_hive_443, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1825.ts EXPRESSION [(test_hive_1827)test_hive_1827.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1810 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1810 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1810 +( + test_hive_1804 string + ,test_hive_1802 string + ,test_hive_1805 string + ,test_hive_442 string + ,test_hive_1803 string + ,test_hive_1808 string + ,test_hive_1807 string + ,test_hive_1806 string + ,test_hive_1809 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1810 +POSTHOOK: query: create table test_hive_1810 +( + test_hive_1804 string + ,test_hive_1802 string + ,test_hive_1805 string + ,test_hive_442 string + ,test_hive_1803 string + ,test_hive_1808 string + ,test_hive_1807 string + ,test_hive_1806 string + ,test_hive_1809 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1810 +PREHOOK: query: create table if not exists test_hive_1813 +( + test_hive_1804 string + ,test_hive_1802 string + ,test_hive_1805 string + ,test_hive_442 string + ,test_hive_1803 string + ,test_hive_1808 string + ,test_hive_1807 string + ,test_hive_1806 string + ,test_hive_1809 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1813 +POSTHOOK: query: create table if not exists test_hive_1813 +( + test_hive_1804 string + ,test_hive_1802 string + ,test_hive_1805 string + ,test_hive_442 string + ,test_hive_1803 string + ,test_hive_1808 string + ,test_hive_1807 string + ,test_hive_1806 string + ,test_hive_1809 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1813 +PREHOOK: query: drop table if exists test_hive_1812 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1812 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1812 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1812 +POSTHOOK: query: create table if not exists test_hive_1812 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1812 +PREHOOK: query: drop view if exists test_hive_1815 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1815 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1815 +as +select + cast(test_hive_1804 as int) as test_hive_1804 + ,cast(test_hive_1802 as int) as test_hive_1802 + ,cast(test_hive_1805 as int) as test_hive_1805 + ,cast(test_hive_442 as string) as test_hive_442 + ,cast(test_hive_1803 as string) as test_hive_1803 + ,cast(test_hive_1808 as string) as 
test_hive_1808 + ,cast(test_hive_1807 as string) as test_hive_1807 + ,cast(test_hive_1806 as string) as test_hive_1806 + ,cast(from_unixtime(unix_timestamp(test_hive_1809,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1809 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1813 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1813 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1815 +POSTHOOK: query: create view if not exists test_hive_1815 +as +select + cast(test_hive_1804 as int) as test_hive_1804 + ,cast(test_hive_1802 as int) as test_hive_1802 + ,cast(test_hive_1805 as int) as test_hive_1805 + ,cast(test_hive_442 as string) as test_hive_442 + ,cast(test_hive_1803 as string) as test_hive_1803 + ,cast(test_hive_1808 as string) as test_hive_1808 + ,cast(test_hive_1807 as string) as test_hive_1807 + ,cast(test_hive_1806 as string) as test_hive_1806 + ,cast(from_unixtime(unix_timestamp(test_hive_1809,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1809 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1813 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1813 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1815 +POSTHOOK: Lineage: test_hive_1815.creation_date EXPRESSION [(test_hive_1813)test_hive_1813.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1815.ds EXPRESSION [(test_hive_1813)test_hive_1813.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1815.ds_ts SIMPLE [(test_hive_1813)test_hive_1813.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1815.source_file_name SIMPLE [(test_hive_1813)test_hive_1813.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1815.test_hive_1802 EXPRESSION [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1802, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1815.test_hive_1803 SIMPLE [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1803, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1815.test_hive_1804 EXPRESSION [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1804, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1815.test_hive_1805 EXPRESSION [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1805, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1815.test_hive_1806 SIMPLE [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1806, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1815.test_hive_1807 SIMPLE [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1807, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1815.test_hive_1808 SIMPLE [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1808, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1815.test_hive_1809 EXPRESSION [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1809, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1815.test_hive_442 SIMPLE [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_442, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1815.ts EXPRESSION [(test_hive_1813)test_hive_1813.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: 
drop view if exists test_hive_1814 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1814 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1814 +as +select + test_hive_1804 as test_hive_1804 + ,test_hive_1802 as test_hive_1802 + ,test_hive_1805 as test_hive_1805 + ,test_hive_442 as test_hive_442 + ,test_hive_1803 as test_hive_1803 + ,test_hive_1808 as test_hive_1808 + ,test_hive_1807 as test_hive_1807 + ,test_hive_1806 as test_hive_1806 + ,test_hive_1809 as test_hive_1809 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1815 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1813 +PREHOOK: Input: default@test_hive_1815 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1814 +POSTHOOK: query: create view test_hive_1814 +as +select + test_hive_1804 as test_hive_1804 + ,test_hive_1802 as test_hive_1802 + ,test_hive_1805 as test_hive_1805 + ,test_hive_442 as test_hive_442 + ,test_hive_1803 as test_hive_1803 + ,test_hive_1808 as test_hive_1808 + ,test_hive_1807 as test_hive_1807 + ,test_hive_1806 as test_hive_1806 + ,test_hive_1809 as test_hive_1809 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1815 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1813 +POSTHOOK: Input: default@test_hive_1815 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1814 +POSTHOOK: Lineage: test_hive_1814.creation_date EXPRESSION [(test_hive_1813)test_hive_1813.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1814.ds EXPRESSION [(test_hive_1813)test_hive_1813.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1814.ds_ts SIMPLE [(test_hive_1813)test_hive_1813.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1814.source_file_name SIMPLE [(test_hive_1813)test_hive_1813.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1814.test_hive_1802 EXPRESSION [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1802, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1814.test_hive_1803 SIMPLE [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1803, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1814.test_hive_1804 EXPRESSION [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1804, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1814.test_hive_1805 EXPRESSION [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1805, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1814.test_hive_1806 SIMPLE [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1806, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1814.test_hive_1807 SIMPLE [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1807, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1814.test_hive_1808 SIMPLE [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1808, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1814.test_hive_1809 EXPRESSION [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1809, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1814.test_hive_442 SIMPLE [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_442, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1814.ts EXPRESSION [(test_hive_1813)test_hive_1813.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists 
test_hive_1811 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1811 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1811 +as +select t1.* +from test_hive_1814 t1 +inner join test_hive_1812 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1812 +PREHOOK: Input: default@test_hive_1813 +PREHOOK: Input: default@test_hive_1814 +PREHOOK: Input: default@test_hive_1815 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1811 +POSTHOOK: query: create view test_hive_1811 +as +select t1.* +from test_hive_1814 t1 +inner join test_hive_1812 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1812 +POSTHOOK: Input: default@test_hive_1813 +POSTHOOK: Input: default@test_hive_1814 +POSTHOOK: Input: default@test_hive_1815 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1811 +POSTHOOK: Lineage: test_hive_1811.creation_date EXPRESSION [(test_hive_1813)test_hive_1813.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1811.ds EXPRESSION [(test_hive_1813)test_hive_1813.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1811.ds_ts SIMPLE [(test_hive_1813)test_hive_1813.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1811.source_file_name SIMPLE [(test_hive_1813)test_hive_1813.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1811.test_hive_1802 EXPRESSION [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1802, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1811.test_hive_1803 SIMPLE [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1803, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1811.test_hive_1804 EXPRESSION [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1804, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1811.test_hive_1805 EXPRESSION [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1805, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1811.test_hive_1806 SIMPLE [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1806, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1811.test_hive_1807 SIMPLE [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1807, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1811.test_hive_1808 SIMPLE [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1808, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1811.test_hive_1809 EXPRESSION [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_1809, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1811.test_hive_442 SIMPLE [(test_hive_1813)test_hive_1813.FieldSchema(name:test_hive_442, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1811.ts EXPRESSION [(test_hive_1813)test_hive_1813.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1796 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1796 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1796 +( + test_hive_1790 string + ,test_hive_1788 string + ,test_hive_1791 string + ,test_hive_441 string + ,test_hive_1789 string + ,test_hive_1794 string + ,test_hive_1793 string + ,test_hive_1792 string + ,test_hive_1795 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by 
'31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1796 +POSTHOOK: query: create table test_hive_1796 +( + test_hive_1790 string + ,test_hive_1788 string + ,test_hive_1791 string + ,test_hive_441 string + ,test_hive_1789 string + ,test_hive_1794 string + ,test_hive_1793 string + ,test_hive_1792 string + ,test_hive_1795 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1796 +PREHOOK: query: create table if not exists test_hive_1799 +( + test_hive_1790 string + ,test_hive_1788 string + ,test_hive_1791 string + ,test_hive_441 string + ,test_hive_1789 string + ,test_hive_1794 string + ,test_hive_1793 string + ,test_hive_1792 string + ,test_hive_1795 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1799 +POSTHOOK: query: create table if not exists test_hive_1799 +( + test_hive_1790 string + ,test_hive_1788 string + ,test_hive_1791 string + ,test_hive_441 string + ,test_hive_1789 string + ,test_hive_1794 string + ,test_hive_1793 string + ,test_hive_1792 string + ,test_hive_1795 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1799 +PREHOOK: query: drop table if exists test_hive_1798 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1798 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1798 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1798 +POSTHOOK: query: create table if not exists test_hive_1798 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1798 +PREHOOK: query: drop view if exists test_hive_1801 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1801 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1801 +as +select + cast(test_hive_1790 as int) as test_hive_1790 + ,cast(test_hive_1788 as int) as test_hive_1788 + ,cast(test_hive_1791 as int) as test_hive_1791 + ,cast(test_hive_441 as string) as test_hive_441 + ,cast(test_hive_1789 as string) as test_hive_1789 + ,cast(test_hive_1794 as string) as test_hive_1794 + ,cast(test_hive_1793 as string) as test_hive_1793 + ,cast(test_hive_1792 as string) as test_hive_1792 + ,cast(from_unixtime(unix_timestamp(test_hive_1795,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1795 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1799 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1799 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1801 +POSTHOOK: query: create view if not exists test_hive_1801 +as +select + cast(test_hive_1790 as int) as test_hive_1790 + ,cast(test_hive_1788 as int) as test_hive_1788 + ,cast(test_hive_1791 as int) as test_hive_1791 + ,cast(test_hive_441 
as string) as test_hive_441 + ,cast(test_hive_1789 as string) as test_hive_1789 + ,cast(test_hive_1794 as string) as test_hive_1794 + ,cast(test_hive_1793 as string) as test_hive_1793 + ,cast(test_hive_1792 as string) as test_hive_1792 + ,cast(from_unixtime(unix_timestamp(test_hive_1795,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1795 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1799 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1799 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1801 +POSTHOOK: Lineage: test_hive_1801.creation_date EXPRESSION [(test_hive_1799)test_hive_1799.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1801.ds EXPRESSION [(test_hive_1799)test_hive_1799.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1801.ds_ts SIMPLE [(test_hive_1799)test_hive_1799.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1801.source_file_name SIMPLE [(test_hive_1799)test_hive_1799.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1801.test_hive_1788 EXPRESSION [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1788, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1801.test_hive_1789 SIMPLE [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1789, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1801.test_hive_1790 EXPRESSION [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1790, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1801.test_hive_1791 EXPRESSION [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1791, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1801.test_hive_1792 SIMPLE [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1792, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1801.test_hive_1793 SIMPLE [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1793, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1801.test_hive_1794 SIMPLE [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1794, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1801.test_hive_1795 EXPRESSION [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1795, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1801.test_hive_441 SIMPLE [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_441, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1801.ts EXPRESSION [(test_hive_1799)test_hive_1799.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1800 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1800 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1800 +as +select + test_hive_1790 as test_hive_1790 + ,test_hive_1788 as test_hive_1788 + ,test_hive_1791 as test_hive_1791 + ,test_hive_441 as test_hive_441 + ,test_hive_1789 as test_hive_1789 + ,test_hive_1794 as test_hive_1794 + ,test_hive_1793 as test_hive_1793 + ,test_hive_1792 as test_hive_1792 + ,test_hive_1795 as test_hive_1795 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1801 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1799 +PREHOOK: Input: default@test_hive_1801 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1800 
+POSTHOOK: query: create view test_hive_1800 +as +select + test_hive_1790 as test_hive_1790 + ,test_hive_1788 as test_hive_1788 + ,test_hive_1791 as test_hive_1791 + ,test_hive_441 as test_hive_441 + ,test_hive_1789 as test_hive_1789 + ,test_hive_1794 as test_hive_1794 + ,test_hive_1793 as test_hive_1793 + ,test_hive_1792 as test_hive_1792 + ,test_hive_1795 as test_hive_1795 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1801 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1799 +POSTHOOK: Input: default@test_hive_1801 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1800 +POSTHOOK: Lineage: test_hive_1800.creation_date EXPRESSION [(test_hive_1799)test_hive_1799.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1800.ds EXPRESSION [(test_hive_1799)test_hive_1799.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1800.ds_ts SIMPLE [(test_hive_1799)test_hive_1799.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1800.source_file_name SIMPLE [(test_hive_1799)test_hive_1799.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1800.test_hive_1788 EXPRESSION [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1788, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1800.test_hive_1789 SIMPLE [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1789, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1800.test_hive_1790 EXPRESSION [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1790, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1800.test_hive_1791 EXPRESSION [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1791, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1800.test_hive_1792 SIMPLE [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1792, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1800.test_hive_1793 SIMPLE [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1793, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1800.test_hive_1794 SIMPLE [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1794, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1800.test_hive_1795 EXPRESSION [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1795, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1800.test_hive_441 SIMPLE [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_441, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1800.ts EXPRESSION [(test_hive_1799)test_hive_1799.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1797 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1797 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1797 +as +select t1.* +from test_hive_1800 t1 +inner join test_hive_1798 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1798 +PREHOOK: Input: default@test_hive_1799 +PREHOOK: Input: default@test_hive_1800 +PREHOOK: Input: default@test_hive_1801 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1797 +POSTHOOK: query: create view test_hive_1797 +as +select t1.* +from test_hive_1800 t1 +inner join test_hive_1798 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1798 +POSTHOOK: Input: 
default@test_hive_1799 +POSTHOOK: Input: default@test_hive_1800 +POSTHOOK: Input: default@test_hive_1801 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1797 +POSTHOOK: Lineage: test_hive_1797.creation_date EXPRESSION [(test_hive_1799)test_hive_1799.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1797.ds EXPRESSION [(test_hive_1799)test_hive_1799.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1797.ds_ts SIMPLE [(test_hive_1799)test_hive_1799.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1797.source_file_name SIMPLE [(test_hive_1799)test_hive_1799.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1797.test_hive_1788 EXPRESSION [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1788, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1797.test_hive_1789 SIMPLE [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1789, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1797.test_hive_1790 EXPRESSION [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1790, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1797.test_hive_1791 EXPRESSION [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1791, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1797.test_hive_1792 SIMPLE [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1792, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1797.test_hive_1793 SIMPLE [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1793, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1797.test_hive_1794 SIMPLE [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1794, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1797.test_hive_1795 EXPRESSION [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_1795, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1797.test_hive_441 SIMPLE [(test_hive_1799)test_hive_1799.FieldSchema(name:test_hive_441, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1797.ts EXPRESSION [(test_hive_1799)test_hive_1799.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1782 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1782 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1782 +( + test_hive_1776 string + ,test_hive_1774 string + ,test_hive_1777 string + ,test_hive_440 string + ,test_hive_1775 string + ,test_hive_1780 string + ,test_hive_1779 string + ,test_hive_1778 string + ,test_hive_1781 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1782 +POSTHOOK: query: create table test_hive_1782 +( + test_hive_1776 string + ,test_hive_1774 string + ,test_hive_1777 string + ,test_hive_440 string + ,test_hive_1775 string + ,test_hive_1780 string + ,test_hive_1779 string + ,test_hive_1778 string + ,test_hive_1781 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1782 +PREHOOK: query: create table if not exists test_hive_1785 +( + test_hive_1776 string + 
,test_hive_1774 string + ,test_hive_1777 string + ,test_hive_440 string + ,test_hive_1775 string + ,test_hive_1780 string + ,test_hive_1779 string + ,test_hive_1778 string + ,test_hive_1781 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1785 +POSTHOOK: query: create table if not exists test_hive_1785 +( + test_hive_1776 string + ,test_hive_1774 string + ,test_hive_1777 string + ,test_hive_440 string + ,test_hive_1775 string + ,test_hive_1780 string + ,test_hive_1779 string + ,test_hive_1778 string + ,test_hive_1781 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1785 +PREHOOK: query: drop table if exists test_hive_1784 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1784 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1784 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1784 +POSTHOOK: query: create table if not exists test_hive_1784 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1784 +PREHOOK: query: drop view if exists test_hive_1787 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1787 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1787 +as +select + cast(test_hive_1776 as int) as test_hive_1776 + ,cast(test_hive_1774 as int) as test_hive_1774 + ,cast(test_hive_1777 as int) as test_hive_1777 + ,cast(test_hive_440 as string) as test_hive_440 + ,cast(test_hive_1775 as string) as test_hive_1775 + ,cast(test_hive_1780 as string) as test_hive_1780 + ,cast(test_hive_1779 as string) as test_hive_1779 + ,cast(test_hive_1778 as string) as test_hive_1778 + ,cast(from_unixtime(unix_timestamp(test_hive_1781,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1781 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1785 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1785 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1787 +POSTHOOK: query: create view if not exists test_hive_1787 +as +select + cast(test_hive_1776 as int) as test_hive_1776 + ,cast(test_hive_1774 as int) as test_hive_1774 + ,cast(test_hive_1777 as int) as test_hive_1777 + ,cast(test_hive_440 as string) as test_hive_440 + ,cast(test_hive_1775 as string) as test_hive_1775 + ,cast(test_hive_1780 as string) as test_hive_1780 + ,cast(test_hive_1779 as string) as test_hive_1779 + ,cast(test_hive_1778 as string) as test_hive_1778 + ,cast(from_unixtime(unix_timestamp(test_hive_1781,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1781 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1785 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1785 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1787 +POSTHOOK: Lineage: test_hive_1787.creation_date EXPRESSION 
[(test_hive_1785)test_hive_1785.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1787.ds EXPRESSION [(test_hive_1785)test_hive_1785.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1787.ds_ts SIMPLE [(test_hive_1785)test_hive_1785.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1787.source_file_name SIMPLE [(test_hive_1785)test_hive_1785.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1787.test_hive_1774 EXPRESSION [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1774, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1787.test_hive_1775 SIMPLE [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1775, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1787.test_hive_1776 EXPRESSION [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1776, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1787.test_hive_1777 EXPRESSION [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1777, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1787.test_hive_1778 SIMPLE [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1778, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1787.test_hive_1779 SIMPLE [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1779, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1787.test_hive_1780 SIMPLE [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1780, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1787.test_hive_1781 EXPRESSION [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1781, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1787.test_hive_440 SIMPLE [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_440, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1787.ts EXPRESSION [(test_hive_1785)test_hive_1785.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1786 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1786 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1786 +as +select + test_hive_1776 as test_hive_1776 + ,test_hive_1774 as test_hive_1774 + ,test_hive_1777 as test_hive_1777 + ,test_hive_440 as test_hive_440 + ,test_hive_1775 as test_hive_1775 + ,test_hive_1780 as test_hive_1780 + ,test_hive_1779 as test_hive_1779 + ,test_hive_1778 as test_hive_1778 + ,test_hive_1781 as test_hive_1781 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1787 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1785 +PREHOOK: Input: default@test_hive_1787 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1786 +POSTHOOK: query: create view test_hive_1786 +as +select + test_hive_1776 as test_hive_1776 + ,test_hive_1774 as test_hive_1774 + ,test_hive_1777 as test_hive_1777 + ,test_hive_440 as test_hive_440 + ,test_hive_1775 as test_hive_1775 + ,test_hive_1780 as test_hive_1780 + ,test_hive_1779 as test_hive_1779 + ,test_hive_1778 as test_hive_1778 + ,test_hive_1781 as test_hive_1781 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1787 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1785 +POSTHOOK: Input: default@test_hive_1787 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1786 +POSTHOOK: Lineage: test_hive_1786.creation_date EXPRESSION 
[(test_hive_1785)test_hive_1785.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1786.ds EXPRESSION [(test_hive_1785)test_hive_1785.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1786.ds_ts SIMPLE [(test_hive_1785)test_hive_1785.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1786.source_file_name SIMPLE [(test_hive_1785)test_hive_1785.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1786.test_hive_1774 EXPRESSION [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1774, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1786.test_hive_1775 SIMPLE [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1775, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1786.test_hive_1776 EXPRESSION [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1776, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1786.test_hive_1777 EXPRESSION [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1777, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1786.test_hive_1778 SIMPLE [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1778, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1786.test_hive_1779 SIMPLE [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1779, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1786.test_hive_1780 SIMPLE [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1780, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1786.test_hive_1781 EXPRESSION [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1781, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1786.test_hive_440 SIMPLE [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_440, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1786.ts EXPRESSION [(test_hive_1785)test_hive_1785.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1783 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1783 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1783 +as +select t1.* +from test_hive_1786 t1 +inner join test_hive_1784 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1784 +PREHOOK: Input: default@test_hive_1785 +PREHOOK: Input: default@test_hive_1786 +PREHOOK: Input: default@test_hive_1787 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1783 +POSTHOOK: query: create view test_hive_1783 +as +select t1.* +from test_hive_1786 t1 +inner join test_hive_1784 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1784 +POSTHOOK: Input: default@test_hive_1785 +POSTHOOK: Input: default@test_hive_1786 +POSTHOOK: Input: default@test_hive_1787 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1783 +POSTHOOK: Lineage: test_hive_1783.creation_date EXPRESSION [(test_hive_1785)test_hive_1785.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1783.ds EXPRESSION [(test_hive_1785)test_hive_1785.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1783.ds_ts SIMPLE [(test_hive_1785)test_hive_1785.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1783.source_file_name SIMPLE [(test_hive_1785)test_hive_1785.FieldSchema(name:source_file_name, 
type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1783.test_hive_1774 EXPRESSION [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1774, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1783.test_hive_1775 SIMPLE [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1775, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1783.test_hive_1776 EXPRESSION [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1776, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1783.test_hive_1777 EXPRESSION [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1777, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1783.test_hive_1778 SIMPLE [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1778, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1783.test_hive_1779 SIMPLE [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1779, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1783.test_hive_1780 SIMPLE [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1780, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1783.test_hive_1781 EXPRESSION [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_1781, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1783.test_hive_440 SIMPLE [(test_hive_1785)test_hive_1785.FieldSchema(name:test_hive_440, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1783.ts EXPRESSION [(test_hive_1785)test_hive_1785.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1768 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1768 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1768 +( + test_hive_1764 string + ,test_hive_1762 string + ,test_hive_1765 string + ,test_hive_438 string + ,test_hive_439 string + ,test_hive_1763 string + ,test_hive_1766 string + ,test_hive_1767 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1768 +POSTHOOK: query: create table test_hive_1768 +( + test_hive_1764 string + ,test_hive_1762 string + ,test_hive_1765 string + ,test_hive_438 string + ,test_hive_439 string + ,test_hive_1763 string + ,test_hive_1766 string + ,test_hive_1767 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1768 +PREHOOK: query: create table if not exists test_hive_1771 +( + test_hive_1764 string + ,test_hive_1762 string + ,test_hive_1765 string + ,test_hive_438 string + ,test_hive_439 string + ,test_hive_1763 string + ,test_hive_1766 string + ,test_hive_1767 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1771 +POSTHOOK: query: create table if not exists test_hive_1771 +( + test_hive_1764 string + ,test_hive_1762 string + ,test_hive_1765 string + ,test_hive_438 string + ,test_hive_439 string + ,test_hive_1763 string + ,test_hive_1766 string + ,test_hive_1767 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet 
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1771
+PREHOOK: query: drop table if exists test_hive_1770 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1770 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1770
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1770
+POSTHOOK: query: create table if not exists test_hive_1770
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1770
+PREHOOK: query: drop view if exists test_hive_1773
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1773
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1773
+as
+select
+ cast(test_hive_1764 as int) as test_hive_1764
+ ,cast(test_hive_1762 as int) as test_hive_1762
+ ,cast(test_hive_1765 as int) as test_hive_1765
+ ,cast(test_hive_438 as string) as test_hive_438
+ ,cast(test_hive_439 as string) as test_hive_439
+ ,cast(test_hive_1763 as string) as test_hive_1763
+ ,cast(test_hive_1766 as string) as test_hive_1766
+ ,cast(from_unixtime(unix_timestamp(test_hive_1767,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1767
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1771
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1771
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1773
+POSTHOOK: query: create view if not exists test_hive_1773
+as
+select
+ cast(test_hive_1764 as int) as test_hive_1764
+ ,cast(test_hive_1762 as int) as test_hive_1762
+ ,cast(test_hive_1765 as int) as test_hive_1765
+ ,cast(test_hive_438 as string) as test_hive_438
+ ,cast(test_hive_439 as string) as test_hive_439
+ ,cast(test_hive_1763 as string) as test_hive_1763
+ ,cast(test_hive_1766 as string) as test_hive_1766
+ ,cast(from_unixtime(unix_timestamp(test_hive_1767,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1767
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1771
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1771
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1773
+POSTHOOK: Lineage: test_hive_1773.creation_date EXPRESSION [(test_hive_1771)test_hive_1771.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1773.ds EXPRESSION [(test_hive_1771)test_hive_1771.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1773.ds_ts SIMPLE [(test_hive_1771)test_hive_1771.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1773.source_file_name SIMPLE [(test_hive_1771)test_hive_1771.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1773.test_hive_1762 EXPRESSION [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_1762, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1773.test_hive_1763 SIMPLE [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_1763, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1773.test_hive_1764 EXPRESSION [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_1764, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1773.test_hive_1765 EXPRESSION [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_1765, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1773.test_hive_1766 SIMPLE [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_1766, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1773.test_hive_1767 EXPRESSION [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_1767, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1773.test_hive_438 SIMPLE [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_438, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1773.test_hive_439 SIMPLE [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_439, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1773.ts EXPRESSION [(test_hive_1771)test_hive_1771.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1772
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1772
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1772
+as
+select
+ test_hive_1764 as test_hive_1764
+ ,test_hive_1762 as test_hive_1762
+ ,test_hive_1765 as test_hive_1765
+ ,test_hive_438 as test_hive_438
+ ,test_hive_439 as test_hive_439
+ ,test_hive_1763 as test_hive_1763
+ ,test_hive_1766 as test_hive_1766
+ ,test_hive_1767 as test_hive_1767
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1773 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1771
+PREHOOK: Input: default@test_hive_1773
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1772
+POSTHOOK: query: create view test_hive_1772
+as
+select
+ test_hive_1764 as test_hive_1764
+ ,test_hive_1762 as test_hive_1762
+ ,test_hive_1765 as test_hive_1765
+ ,test_hive_438 as test_hive_438
+ ,test_hive_439 as test_hive_439
+ ,test_hive_1763 as test_hive_1763
+ ,test_hive_1766 as test_hive_1766
+ ,test_hive_1767 as test_hive_1767
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1773 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1771
+POSTHOOK: Input: default@test_hive_1773
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1772
+POSTHOOK: Lineage: test_hive_1772.creation_date EXPRESSION [(test_hive_1771)test_hive_1771.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1772.ds EXPRESSION [(test_hive_1771)test_hive_1771.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1772.ds_ts SIMPLE [(test_hive_1771)test_hive_1771.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1772.source_file_name SIMPLE [(test_hive_1771)test_hive_1771.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1772.test_hive_1762 EXPRESSION [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_1762, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1772.test_hive_1763 SIMPLE [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_1763, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1772.test_hive_1764 EXPRESSION [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_1764, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1772.test_hive_1765 EXPRESSION [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_1765, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1772.test_hive_1766 SIMPLE [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_1766, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1772.test_hive_1767 EXPRESSION [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_1767, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1772.test_hive_438 SIMPLE [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_438, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1772.test_hive_439 SIMPLE [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_439, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1772.ts EXPRESSION [(test_hive_1771)test_hive_1771.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1769
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1769
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1769
+as
+select t1.*
+from test_hive_1772 t1
+inner join test_hive_1770 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1770
+PREHOOK: Input: default@test_hive_1771
+PREHOOK: Input: default@test_hive_1772
+PREHOOK: Input: default@test_hive_1773
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1769
+POSTHOOK: query: create view test_hive_1769
+as
+select t1.*
+from test_hive_1772 t1
+inner join test_hive_1770 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1770
+POSTHOOK: Input: default@test_hive_1771
+POSTHOOK: Input: default@test_hive_1772
+POSTHOOK: Input: default@test_hive_1773
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1769
+POSTHOOK: Lineage: test_hive_1769.creation_date EXPRESSION [(test_hive_1771)test_hive_1771.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1769.ds EXPRESSION [(test_hive_1771)test_hive_1771.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1769.ds_ts SIMPLE [(test_hive_1771)test_hive_1771.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1769.source_file_name SIMPLE [(test_hive_1771)test_hive_1771.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1769.test_hive_1762 EXPRESSION [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_1762, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1769.test_hive_1763 SIMPLE [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_1763, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1769.test_hive_1764 EXPRESSION [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_1764, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1769.test_hive_1765 EXPRESSION [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_1765, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1769.test_hive_1766 SIMPLE [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_1766, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1769.test_hive_1767 EXPRESSION [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_1767, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1769.test_hive_438 SIMPLE [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_438, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1769.test_hive_439 SIMPLE [(test_hive_1771)test_hive_1771.FieldSchema(name:test_hive_439, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1769.ts EXPRESSION [(test_hive_1771)test_hive_1771.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1756 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1756 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1756
+(
+ test_hive_1752 string
+ ,test_hive_1750 string
+ ,test_hive_1753 string
+ ,test_hive_437 string
+ ,test_hive_1751 string
+ ,test_hive_1754 string
+ ,test_hive_1755 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1756
+POSTHOOK: query: create table test_hive_1756
+(
+ test_hive_1752 string
+ ,test_hive_1750 string
+ ,test_hive_1753 string
+ ,test_hive_437 string
+ ,test_hive_1751 string
+ ,test_hive_1754 string
+ ,test_hive_1755 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1756
+PREHOOK: query: create table if not exists test_hive_1759
+(
+ test_hive_1752 string
+ ,test_hive_1750 string
+ ,test_hive_1753 string
+ ,test_hive_437 string
+ ,test_hive_1751 string
+ ,test_hive_1754 string
+ ,test_hive_1755 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1759
+POSTHOOK: query: create table if not exists test_hive_1759
+(
+ test_hive_1752 string
+ ,test_hive_1750 string
+ ,test_hive_1753 string
+ ,test_hive_437 string
+ ,test_hive_1751 string
+ ,test_hive_1754 string
+ ,test_hive_1755 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1759
+PREHOOK: query: drop table if exists test_hive_1758 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1758 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1758
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1758
+POSTHOOK: query: create table if not exists test_hive_1758
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1758
+PREHOOK: query: drop view if exists test_hive_1761
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1761
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1761
+as
+select
+ cast(test_hive_1752 as int) as test_hive_1752
+ ,cast(test_hive_1750 as int) as test_hive_1750
+ ,cast(test_hive_1753 as int) as test_hive_1753
+ ,cast(test_hive_437 as string) as test_hive_437
+ ,cast(test_hive_1751 as string) as test_hive_1751
+ ,cast(test_hive_1754 as string) as test_hive_1754
+ ,cast(from_unixtime(unix_timestamp(test_hive_1755,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1755
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1759
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1759
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1761
+POSTHOOK: query: create view if not exists test_hive_1761
+as
+select
+ cast(test_hive_1752 as int) as test_hive_1752
+ ,cast(test_hive_1750 as int) as test_hive_1750
+ ,cast(test_hive_1753 as int) as test_hive_1753
+ ,cast(test_hive_437 as string) as test_hive_437
+ ,cast(test_hive_1751 as string) as test_hive_1751
+ ,cast(test_hive_1754 as string) as test_hive_1754
+ ,cast(from_unixtime(unix_timestamp(test_hive_1755,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1755
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1759
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1759
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1761
+POSTHOOK: Lineage: test_hive_1761.creation_date EXPRESSION [(test_hive_1759)test_hive_1759.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1761.ds EXPRESSION [(test_hive_1759)test_hive_1759.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1761.ds_ts SIMPLE [(test_hive_1759)test_hive_1759.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1761.source_file_name SIMPLE [(test_hive_1759)test_hive_1759.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1761.test_hive_1750 EXPRESSION [(test_hive_1759)test_hive_1759.FieldSchema(name:test_hive_1750, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1761.test_hive_1751 SIMPLE [(test_hive_1759)test_hive_1759.FieldSchema(name:test_hive_1751, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1761.test_hive_1752 EXPRESSION [(test_hive_1759)test_hive_1759.FieldSchema(name:test_hive_1752, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1761.test_hive_1753 EXPRESSION [(test_hive_1759)test_hive_1759.FieldSchema(name:test_hive_1753, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1761.test_hive_1754 SIMPLE [(test_hive_1759)test_hive_1759.FieldSchema(name:test_hive_1754, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1761.test_hive_1755 EXPRESSION [(test_hive_1759)test_hive_1759.FieldSchema(name:test_hive_1755, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1761.test_hive_437 SIMPLE [(test_hive_1759)test_hive_1759.FieldSchema(name:test_hive_437, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1761.ts EXPRESSION [(test_hive_1759)test_hive_1759.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1760
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1760
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1760
+as
+select
+ test_hive_1752 as test_hive_1752
+ ,test_hive_1750 as test_hive_1750
+ ,test_hive_1753 as test_hive_1753
+ ,test_hive_437 as test_hive_437
+ ,test_hive_1751 as test_hive_1751
+ ,test_hive_1754 as test_hive_1754
+ ,test_hive_1755 as test_hive_1755
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1761 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1759
+PREHOOK: Input: default@test_hive_1761
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1760
+POSTHOOK: query: create view test_hive_1760
+as
+select
+ test_hive_1752 as test_hive_1752
+ ,test_hive_1750 as test_hive_1750
+ ,test_hive_1753 as test_hive_1753
+ ,test_hive_437 as test_hive_437
+ ,test_hive_1751 as test_hive_1751
+ ,test_hive_1754 as test_hive_1754
+ ,test_hive_1755 as test_hive_1755
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1761 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1759
+POSTHOOK: Input: default@test_hive_1761
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1760
+POSTHOOK: Lineage: test_hive_1760.creation_date EXPRESSION [(test_hive_1759)test_hive_1759.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1760.ds EXPRESSION [(test_hive_1759)test_hive_1759.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1760.ds_ts SIMPLE [(test_hive_1759)test_hive_1759.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1760.source_file_name SIMPLE [(test_hive_1759)test_hive_1759.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1760.test_hive_1750 EXPRESSION [(test_hive_1759)test_hive_1759.FieldSchema(name:test_hive_1750, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1760.test_hive_1751 SIMPLE [(test_hive_1759)test_hive_1759.FieldSchema(name:test_hive_1751, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1760.test_hive_1752 EXPRESSION [(test_hive_1759)test_hive_1759.FieldSchema(name:test_hive_1752, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1760.test_hive_1753 EXPRESSION [(test_hive_1759)test_hive_1759.FieldSchema(name:test_hive_1753, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1760.test_hive_1754 SIMPLE [(test_hive_1759)test_hive_1759.FieldSchema(name:test_hive_1754, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1760.test_hive_1755 EXPRESSION [(test_hive_1759)test_hive_1759.FieldSchema(name:test_hive_1755, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1760.test_hive_437 SIMPLE [(test_hive_1759)test_hive_1759.FieldSchema(name:test_hive_437, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1760.ts EXPRESSION [(test_hive_1759)test_hive_1759.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1757
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1757
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1757
+as
+select t1.*
+from test_hive_1760 t1
+inner join test_hive_1758 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1758
+PREHOOK: Input: default@test_hive_1759
+PREHOOK: Input: default@test_hive_1760
+PREHOOK: Input: default@test_hive_1761
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1757
+POSTHOOK: query: create view test_hive_1757
+as
+select t1.*
+from test_hive_1760 t1
+inner join test_hive_1758 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1758
+POSTHOOK: Input: default@test_hive_1759
+POSTHOOK: Input: default@test_hive_1760
+POSTHOOK: Input: default@test_hive_1761
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1757
+POSTHOOK: Lineage: test_hive_1757.creation_date EXPRESSION [(test_hive_1759)test_hive_1759.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1757.ds EXPRESSION [(test_hive_1759)test_hive_1759.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1757.ds_ts SIMPLE [(test_hive_1759)test_hive_1759.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1757.source_file_name SIMPLE [(test_hive_1759)test_hive_1759.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1757.test_hive_1750 EXPRESSION [(test_hive_1759)test_hive_1759.FieldSchema(name:test_hive_1750, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1757.test_hive_1751 SIMPLE [(test_hive_1759)test_hive_1759.FieldSchema(name:test_hive_1751, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1757.test_hive_1752 EXPRESSION [(test_hive_1759)test_hive_1759.FieldSchema(name:test_hive_1752, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1757.test_hive_1753 EXPRESSION [(test_hive_1759)test_hive_1759.FieldSchema(name:test_hive_1753, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1757.test_hive_1754 SIMPLE [(test_hive_1759)test_hive_1759.FieldSchema(name:test_hive_1754, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1757.test_hive_1755 EXPRESSION [(test_hive_1759)test_hive_1759.FieldSchema(name:test_hive_1755, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1757.test_hive_437 SIMPLE [(test_hive_1759)test_hive_1759.FieldSchema(name:test_hive_437, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1757.ts EXPRESSION [(test_hive_1759)test_hive_1759.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1744 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1744 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1744
+(
+ test_hive_1740 string
+ ,test_hive_1738 string
+ ,test_hive_1741 string
+ ,test_hive_436 string
+ ,test_hive_1739 string
+ ,test_hive_1742 string
+ ,test_hive_1743 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1744
+POSTHOOK: query: create table test_hive_1744
+(
+ test_hive_1740 string
+ ,test_hive_1738 string
+ ,test_hive_1741 string
+ ,test_hive_436 string
+ ,test_hive_1739 string
+ ,test_hive_1742 string
+ ,test_hive_1743 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1744
+PREHOOK: query: create table if not exists test_hive_1747
+(
+ test_hive_1740 string
+ ,test_hive_1738 string
+ ,test_hive_1741 string
+ ,test_hive_436 string
+ ,test_hive_1739 string
+ ,test_hive_1742 string
+ ,test_hive_1743 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1747
+POSTHOOK: query: create table if not exists test_hive_1747
+(
+ test_hive_1740 string
+ ,test_hive_1738 string
+ ,test_hive_1741 string
+ ,test_hive_436 string
+ ,test_hive_1739 string
+ ,test_hive_1742 string
+ ,test_hive_1743 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1747
+PREHOOK: query: drop table if exists test_hive_1746 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1746 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1746
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1746
+POSTHOOK: query: create table if not exists test_hive_1746
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1746
+PREHOOK: query: drop view if exists test_hive_1749
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1749
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1749
+as
+select
+ cast(test_hive_1740 as int) as test_hive_1740
+ ,cast(test_hive_1738 as int) as test_hive_1738
+ ,cast(test_hive_1741 as int) as test_hive_1741
+ ,cast(test_hive_436 as string) as test_hive_436
+ ,cast(test_hive_1739 as string) as test_hive_1739
+ ,cast(test_hive_1742 as string) as test_hive_1742
+ ,cast(from_unixtime(unix_timestamp(test_hive_1743,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1743
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1747
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1747
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1749
+POSTHOOK: query: create view if not exists test_hive_1749
+as
+select
+ cast(test_hive_1740 as int) as test_hive_1740
+ ,cast(test_hive_1738 as int) as test_hive_1738
+ ,cast(test_hive_1741 as int) as test_hive_1741
+ ,cast(test_hive_436 as string) as test_hive_436
+ ,cast(test_hive_1739 as string) as test_hive_1739
+ ,cast(test_hive_1742 as string) as test_hive_1742
+ ,cast(from_unixtime(unix_timestamp(test_hive_1743,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1743
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1747
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1747
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1749
+POSTHOOK: Lineage: test_hive_1749.creation_date EXPRESSION [(test_hive_1747)test_hive_1747.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1749.ds EXPRESSION [(test_hive_1747)test_hive_1747.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1749.ds_ts SIMPLE [(test_hive_1747)test_hive_1747.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1749.source_file_name SIMPLE [(test_hive_1747)test_hive_1747.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1749.test_hive_1738 EXPRESSION [(test_hive_1747)test_hive_1747.FieldSchema(name:test_hive_1738, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1749.test_hive_1739 SIMPLE [(test_hive_1747)test_hive_1747.FieldSchema(name:test_hive_1739, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1749.test_hive_1740 EXPRESSION [(test_hive_1747)test_hive_1747.FieldSchema(name:test_hive_1740, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1749.test_hive_1741 EXPRESSION [(test_hive_1747)test_hive_1747.FieldSchema(name:test_hive_1741, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1749.test_hive_1742 SIMPLE [(test_hive_1747)test_hive_1747.FieldSchema(name:test_hive_1742, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1749.test_hive_1743 EXPRESSION [(test_hive_1747)test_hive_1747.FieldSchema(name:test_hive_1743, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1749.test_hive_436 SIMPLE [(test_hive_1747)test_hive_1747.FieldSchema(name:test_hive_436, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1749.ts EXPRESSION [(test_hive_1747)test_hive_1747.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1748
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1748
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1748
+as
+select
+ test_hive_1740 as test_hive_1740
+ ,test_hive_1738 as test_hive_1738
+ ,test_hive_1741 as test_hive_1741
+ ,test_hive_436 as test_hive_436
+ ,test_hive_1739 as test_hive_1739
+ ,test_hive_1742 as test_hive_1742
+ ,test_hive_1743 as test_hive_1743
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1749 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1747
+PREHOOK: Input: default@test_hive_1749
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1748
+POSTHOOK: query: create view test_hive_1748
+as
+select
+ test_hive_1740 as test_hive_1740
+ ,test_hive_1738 as test_hive_1738
+ ,test_hive_1741 as test_hive_1741
+ ,test_hive_436 as test_hive_436
+ ,test_hive_1739 as test_hive_1739
+ ,test_hive_1742 as test_hive_1742
+ ,test_hive_1743 as test_hive_1743
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1749 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1747
+POSTHOOK: Input: default@test_hive_1749
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1748
+POSTHOOK: Lineage: test_hive_1748.creation_date EXPRESSION [(test_hive_1747)test_hive_1747.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1748.ds EXPRESSION [(test_hive_1747)test_hive_1747.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1748.ds_ts SIMPLE [(test_hive_1747)test_hive_1747.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1748.source_file_name SIMPLE [(test_hive_1747)test_hive_1747.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1748.test_hive_1738 EXPRESSION [(test_hive_1747)test_hive_1747.FieldSchema(name:test_hive_1738, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1748.test_hive_1739 SIMPLE [(test_hive_1747)test_hive_1747.FieldSchema(name:test_hive_1739, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1748.test_hive_1740 EXPRESSION [(test_hive_1747)test_hive_1747.FieldSchema(name:test_hive_1740, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1748.test_hive_1741 EXPRESSION [(test_hive_1747)test_hive_1747.FieldSchema(name:test_hive_1741, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1748.test_hive_1742 SIMPLE [(test_hive_1747)test_hive_1747.FieldSchema(name:test_hive_1742, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1748.test_hive_1743 EXPRESSION [(test_hive_1747)test_hive_1747.FieldSchema(name:test_hive_1743, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1748.test_hive_436 SIMPLE [(test_hive_1747)test_hive_1747.FieldSchema(name:test_hive_436, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1748.ts EXPRESSION [(test_hive_1747)test_hive_1747.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1745
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1745
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1745
+as
+select t1.*
+from test_hive_1748 t1
+inner join test_hive_1746 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1746
+PREHOOK: Input: default@test_hive_1747
+PREHOOK: Input: default@test_hive_1748
+PREHOOK: Input: default@test_hive_1749
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1745
+POSTHOOK: query: create view test_hive_1745
+as
+select t1.*
+from test_hive_1748 t1
+inner join test_hive_1746 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1746
+POSTHOOK: Input: default@test_hive_1747
+POSTHOOK: Input: default@test_hive_1748
+POSTHOOK: Input: default@test_hive_1749
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1745
+POSTHOOK: Lineage: test_hive_1745.creation_date EXPRESSION [(test_hive_1747)test_hive_1747.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1745.ds EXPRESSION [(test_hive_1747)test_hive_1747.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1745.ds_ts SIMPLE [(test_hive_1747)test_hive_1747.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1745.source_file_name SIMPLE [(test_hive_1747)test_hive_1747.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1745.test_hive_1738 EXPRESSION [(test_hive_1747)test_hive_1747.FieldSchema(name:test_hive_1738, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1745.test_hive_1739 SIMPLE [(test_hive_1747)test_hive_1747.FieldSchema(name:test_hive_1739, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1745.test_hive_1740 EXPRESSION [(test_hive_1747)test_hive_1747.FieldSchema(name:test_hive_1740, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1745.test_hive_1741 EXPRESSION [(test_hive_1747)test_hive_1747.FieldSchema(name:test_hive_1741, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1745.test_hive_1742 SIMPLE [(test_hive_1747)test_hive_1747.FieldSchema(name:test_hive_1742, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1745.test_hive_1743 EXPRESSION [(test_hive_1747)test_hive_1747.FieldSchema(name:test_hive_1743, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1745.test_hive_436 SIMPLE [(test_hive_1747)test_hive_1747.FieldSchema(name:test_hive_436, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1745.ts EXPRESSION [(test_hive_1747)test_hive_1747.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1732 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1732 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1732
+(
+ test_hive_1728 string
+ ,test_hive_1726 string
+ ,test_hive_1729 string
+ ,test_hive_435 string
+ ,test_hive_1727 string
+ ,test_hive_1730 string
+ ,test_hive_1731 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1732
+POSTHOOK: query: create table test_hive_1732
+(
+ test_hive_1728 string
+ ,test_hive_1726 string
+ ,test_hive_1729 string
+ ,test_hive_435 string
+ ,test_hive_1727 string
+ ,test_hive_1730 string
+ ,test_hive_1731 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1732
+PREHOOK: query: create table if not exists test_hive_1735
+(
+ test_hive_1728 string
+ ,test_hive_1726 string
+ ,test_hive_1729 string
+ ,test_hive_435 string
+ ,test_hive_1727 string
+ ,test_hive_1730 string
+ ,test_hive_1731 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1735
+POSTHOOK: query: create table if not exists test_hive_1735
+(
+ test_hive_1728 string
+ ,test_hive_1726 string
+ ,test_hive_1729 string
+ ,test_hive_435 string
+ ,test_hive_1727 string
+ ,test_hive_1730 string
+ ,test_hive_1731 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1735
+PREHOOK: query: drop table if exists test_hive_1734 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1734 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1734
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1734
+POSTHOOK: query: create table if not exists test_hive_1734
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1734
+PREHOOK: query: drop view if exists test_hive_1737
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1737
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1737
+as
+select
+ cast(test_hive_1728 as int) as test_hive_1728
+ ,cast(test_hive_1726 as int) as test_hive_1726
+ ,cast(test_hive_1729 as int) as test_hive_1729
+ ,cast(test_hive_435 as string) as test_hive_435
+ ,cast(test_hive_1727 as string) as test_hive_1727
+ ,cast(test_hive_1730 as string) as test_hive_1730
+ ,cast(from_unixtime(unix_timestamp(test_hive_1731,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1731
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1735
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1735
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1737
+POSTHOOK: query: create view if not exists test_hive_1737
+as
+select
+ cast(test_hive_1728 as int) as test_hive_1728
+ ,cast(test_hive_1726 as int) as test_hive_1726
+ ,cast(test_hive_1729 as int) as test_hive_1729
+ ,cast(test_hive_435 as string) as test_hive_435
+ ,cast(test_hive_1727 as string) as test_hive_1727
+ ,cast(test_hive_1730 as string) as test_hive_1730
+ ,cast(from_unixtime(unix_timestamp(test_hive_1731,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1731
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1735
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1735
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1737
+POSTHOOK: Lineage: test_hive_1737.creation_date EXPRESSION [(test_hive_1735)test_hive_1735.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1737.ds EXPRESSION [(test_hive_1735)test_hive_1735.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1737.ds_ts SIMPLE [(test_hive_1735)test_hive_1735.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1737.source_file_name SIMPLE [(test_hive_1735)test_hive_1735.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1737.test_hive_1726 EXPRESSION [(test_hive_1735)test_hive_1735.FieldSchema(name:test_hive_1726, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1737.test_hive_1727 SIMPLE [(test_hive_1735)test_hive_1735.FieldSchema(name:test_hive_1727, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1737.test_hive_1728 EXPRESSION [(test_hive_1735)test_hive_1735.FieldSchema(name:test_hive_1728, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1737.test_hive_1729 EXPRESSION [(test_hive_1735)test_hive_1735.FieldSchema(name:test_hive_1729, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1737.test_hive_1730 SIMPLE [(test_hive_1735)test_hive_1735.FieldSchema(name:test_hive_1730, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1737.test_hive_1731 EXPRESSION [(test_hive_1735)test_hive_1735.FieldSchema(name:test_hive_1731, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1737.test_hive_435 SIMPLE [(test_hive_1735)test_hive_1735.FieldSchema(name:test_hive_435, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1737.ts EXPRESSION [(test_hive_1735)test_hive_1735.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1736
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1736
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1736
+as
+select
+ test_hive_1728 as test_hive_1728
+ ,test_hive_1726 as test_hive_1726
+ ,test_hive_1729 as test_hive_1729
+ ,test_hive_435 as test_hive_435
+ ,test_hive_1727 as test_hive_1727
+ ,test_hive_1730 as test_hive_1730
+ ,test_hive_1731 as test_hive_1731
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1737 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1735
+PREHOOK: Input: default@test_hive_1737
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1736
+POSTHOOK: query: create view test_hive_1736
+as
+select
+ test_hive_1728 as test_hive_1728
+ ,test_hive_1726 as test_hive_1726
+ ,test_hive_1729 as test_hive_1729
+ ,test_hive_435 as test_hive_435
+ ,test_hive_1727 as test_hive_1727
+ ,test_hive_1730 as test_hive_1730
+ ,test_hive_1731 as test_hive_1731
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1737 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1735
+POSTHOOK: Input: default@test_hive_1737
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1736
+POSTHOOK: Lineage: test_hive_1736.creation_date EXPRESSION [(test_hive_1735)test_hive_1735.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1736.ds EXPRESSION [(test_hive_1735)test_hive_1735.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1736.ds_ts SIMPLE [(test_hive_1735)test_hive_1735.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1736.source_file_name SIMPLE [(test_hive_1735)test_hive_1735.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1736.test_hive_1726 EXPRESSION [(test_hive_1735)test_hive_1735.FieldSchema(name:test_hive_1726, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1736.test_hive_1727 SIMPLE [(test_hive_1735)test_hive_1735.FieldSchema(name:test_hive_1727, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1736.test_hive_1728 EXPRESSION [(test_hive_1735)test_hive_1735.FieldSchema(name:test_hive_1728, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1736.test_hive_1729 EXPRESSION [(test_hive_1735)test_hive_1735.FieldSchema(name:test_hive_1729, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1736.test_hive_1730 SIMPLE [(test_hive_1735)test_hive_1735.FieldSchema(name:test_hive_1730, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1736.test_hive_1731 EXPRESSION [(test_hive_1735)test_hive_1735.FieldSchema(name:test_hive_1731, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1736.test_hive_435 SIMPLE [(test_hive_1735)test_hive_1735.FieldSchema(name:test_hive_435, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1736.ts EXPRESSION [(test_hive_1735)test_hive_1735.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1733
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1733
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1733
+as
+select t1.*
+from test_hive_1736 t1
+inner join test_hive_1734 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1734
+PREHOOK: Input: default@test_hive_1735
+PREHOOK: Input: default@test_hive_1736
+PREHOOK: Input: default@test_hive_1737
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1733
+POSTHOOK: query: create view test_hive_1733
+as
+select t1.*
+from test_hive_1736 t1
+inner join test_hive_1734 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1734
+POSTHOOK: Input: default@test_hive_1735
+POSTHOOK: Input: default@test_hive_1736
+POSTHOOK: Input: default@test_hive_1737
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1733
+POSTHOOK: Lineage: test_hive_1733.creation_date EXPRESSION [(test_hive_1735)test_hive_1735.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1733.ds EXPRESSION [(test_hive_1735)test_hive_1735.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1733.ds_ts SIMPLE [(test_hive_1735)test_hive_1735.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1733.source_file_name SIMPLE [(test_hive_1735)test_hive_1735.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1733.test_hive_1726 EXPRESSION [(test_hive_1735)test_hive_1735.FieldSchema(name:test_hive_1726, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1733.test_hive_1727 SIMPLE [(test_hive_1735)test_hive_1735.FieldSchema(name:test_hive_1727, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1733.test_hive_1728 EXPRESSION [(test_hive_1735)test_hive_1735.FieldSchema(name:test_hive_1728, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1733.test_hive_1729 EXPRESSION [(test_hive_1735)test_hive_1735.FieldSchema(name:test_hive_1729, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1733.test_hive_1730 SIMPLE [(test_hive_1735)test_hive_1735.FieldSchema(name:test_hive_1730, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1733.test_hive_1731 EXPRESSION [(test_hive_1735)test_hive_1735.FieldSchema(name:test_hive_1731, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1733.test_hive_435 SIMPLE [(test_hive_1735)test_hive_1735.FieldSchema(name:test_hive_435, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1733.ts EXPRESSION [(test_hive_1735)test_hive_1735.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1720 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1720 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1720
+(
+ test_hive_1714 string
+ ,test_hive_1712 string
+ ,test_hive_1715 string
+ ,test_hive_434 string
+ ,test_hive_1713 string
+ ,test_hive_1718 string
+ ,test_hive_1717 string
+ ,test_hive_1716 string
+ ,test_hive_1719 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1720
+POSTHOOK: query: create table test_hive_1720
+(
+ test_hive_1714 string
+ ,test_hive_1712 string
+ ,test_hive_1715 string
+ ,test_hive_434 string
+ ,test_hive_1713 string
+ ,test_hive_1718 string
+ ,test_hive_1717 string
+ ,test_hive_1716 string
+ ,test_hive_1719 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1720
+PREHOOK: query: create table if not exists test_hive_1723
+(
+ test_hive_1714 string
+ ,test_hive_1712 string
+ ,test_hive_1715 string
+ ,test_hive_434 string
+ ,test_hive_1713 string
+ ,test_hive_1718 string
+ ,test_hive_1717 string
+ ,test_hive_1716 string
+ ,test_hive_1719 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1723
+POSTHOOK: query: create table if not exists test_hive_1723
+(
+ test_hive_1714 string
+ ,test_hive_1712 string
+ ,test_hive_1715 string
+ ,test_hive_434 string
+ ,test_hive_1713 string
+ ,test_hive_1718 string
+ ,test_hive_1717 string
+ ,test_hive_1716 string
+ ,test_hive_1719 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1723
+PREHOOK: query: drop table if exists test_hive_1722 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1722 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1722
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1722
+POSTHOOK: query: create table if not exists test_hive_1722
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1722
+PREHOOK: query: drop view if exists test_hive_1725
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1725
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1725
+as
+select
+ cast(test_hive_1714 as int) as test_hive_1714
+ ,cast(test_hive_1712 as int) as test_hive_1712
+ ,cast(test_hive_1715 as int) as test_hive_1715
+ ,cast(test_hive_434 as string) as test_hive_434
+ ,cast(test_hive_1713 as string) as test_hive_1713
+ ,cast(test_hive_1718 as string) as test_hive_1718
+ ,cast(test_hive_1717 as string) as test_hive_1717
+ ,cast(test_hive_1716 as string) as test_hive_1716
+ ,cast(from_unixtime(unix_timestamp(test_hive_1719,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1719
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1723
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1723
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1725
+POSTHOOK: query: create view if not exists test_hive_1725
+as
+select
+ cast(test_hive_1714 as int) as test_hive_1714
+ ,cast(test_hive_1712 as int) as test_hive_1712
+ ,cast(test_hive_1715 as int) as test_hive_1715
+ ,cast(test_hive_434 as string) as test_hive_434
+ ,cast(test_hive_1713 as string) as test_hive_1713
+ ,cast(test_hive_1718 as string) as test_hive_1718
+ ,cast(test_hive_1717 as string) as test_hive_1717
+ ,cast(test_hive_1716 as string) as test_hive_1716
+ ,cast(from_unixtime(unix_timestamp(test_hive_1719,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1719
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1723
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1723
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1725
+POSTHOOK: Lineage: test_hive_1725.creation_date EXPRESSION [(test_hive_1723)test_hive_1723.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1725.ds EXPRESSION [(test_hive_1723)test_hive_1723.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1725.ds_ts SIMPLE [(test_hive_1723)test_hive_1723.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1725.source_file_name SIMPLE [(test_hive_1723)test_hive_1723.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1725.test_hive_1712 EXPRESSION [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1712, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1725.test_hive_1713 SIMPLE [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1713, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1725.test_hive_1714 EXPRESSION [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1714, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1725.test_hive_1715 EXPRESSION [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1715, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1725.test_hive_1716 SIMPLE [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1716, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1725.test_hive_1717 SIMPLE [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1717, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1725.test_hive_1718 SIMPLE [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1718, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1725.test_hive_1719 EXPRESSION [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1719, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1725.test_hive_434 SIMPLE [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_434, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1725.ts EXPRESSION [(test_hive_1723)test_hive_1723.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1724
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1724
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1724
+as
+select
+ test_hive_1714 as test_hive_1714
+ ,test_hive_1712 as test_hive_1712
+ ,test_hive_1715 as test_hive_1715
+ ,test_hive_434 as test_hive_434
+ ,test_hive_1713 as test_hive_1713
+ ,test_hive_1718 as test_hive_1718
+ ,test_hive_1717 as test_hive_1717
+ ,test_hive_1716 as test_hive_1716
+ ,test_hive_1719 as test_hive_1719
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1725 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1723
+PREHOOK: Input: default@test_hive_1725
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1724
+POSTHOOK: query: create view test_hive_1724
+as
+select
+ test_hive_1714 as test_hive_1714
+ ,test_hive_1712 as test_hive_1712
+ ,test_hive_1715 as test_hive_1715
+ ,test_hive_434 as test_hive_434
+ ,test_hive_1713 as test_hive_1713
+ ,test_hive_1718 as test_hive_1718
+ ,test_hive_1717 as test_hive_1717
+ ,test_hive_1716 as test_hive_1716
+ ,test_hive_1719 as test_hive_1719
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1725 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1723
+POSTHOOK: Input: default@test_hive_1725
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1724
+POSTHOOK: Lineage: test_hive_1724.creation_date EXPRESSION [(test_hive_1723)test_hive_1723.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1724.ds EXPRESSION [(test_hive_1723)test_hive_1723.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1724.ds_ts SIMPLE [(test_hive_1723)test_hive_1723.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1724.source_file_name SIMPLE [(test_hive_1723)test_hive_1723.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1724.test_hive_1712 EXPRESSION [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1712, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1724.test_hive_1713 SIMPLE [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1713, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1724.test_hive_1714 EXPRESSION [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1714, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1724.test_hive_1715 EXPRESSION [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1715, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1724.test_hive_1716 SIMPLE [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1716, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1724.test_hive_1717 SIMPLE [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1717, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1724.test_hive_1718 SIMPLE [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1718, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1724.test_hive_1719 EXPRESSION [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1719, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1724.test_hive_434 SIMPLE [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_434, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1724.ts EXPRESSION [(test_hive_1723)test_hive_1723.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1721
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1721
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1721
+as
+select t1.*
+from test_hive_1724 t1
+inner join test_hive_1722 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1722
+PREHOOK: Input: default@test_hive_1723
+PREHOOK: Input: default@test_hive_1724
+PREHOOK: Input: default@test_hive_1725
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1721
+POSTHOOK: query: create view test_hive_1721
+as
+select t1.*
+from test_hive_1724 t1
+inner join test_hive_1722 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1722
+POSTHOOK: Input: default@test_hive_1723
+POSTHOOK: Input: default@test_hive_1724
+POSTHOOK: Input: default@test_hive_1725
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1721
+POSTHOOK: Lineage: test_hive_1721.creation_date EXPRESSION [(test_hive_1723)test_hive_1723.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1721.ds EXPRESSION [(test_hive_1723)test_hive_1723.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1721.ds_ts SIMPLE [(test_hive_1723)test_hive_1723.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1721.source_file_name SIMPLE [(test_hive_1723)test_hive_1723.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1721.test_hive_1712 EXPRESSION [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1712, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1721.test_hive_1713 SIMPLE [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1713, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1721.test_hive_1714 EXPRESSION [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1714, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1721.test_hive_1715 EXPRESSION [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1715, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1721.test_hive_1716 SIMPLE [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1716, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1721.test_hive_1717 SIMPLE [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1717, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1721.test_hive_1718 SIMPLE [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1718, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1721.test_hive_1719 EXPRESSION [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_1719, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1721.test_hive_434 SIMPLE [(test_hive_1723)test_hive_1723.FieldSchema(name:test_hive_434, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1721.ts EXPRESSION [(test_hive_1723)test_hive_1723.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1706 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1706 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1706
+(
+ test_hive_1700 string
+ ,test_hive_1698 string
+ ,test_hive_1701 string
+ ,test_hive_433 string
+ ,test_hive_1699 string
+ ,test_hive_1704 string
+ ,test_hive_1703 string
+ ,test_hive_1702 string
+ ,test_hive_1705 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1706
+POSTHOOK: query: create table test_hive_1706
+(
+ test_hive_1700 string
+ ,test_hive_1698 string
+ ,test_hive_1701 string
+ ,test_hive_433 string
+ ,test_hive_1699 string
+ ,test_hive_1704 string
+ ,test_hive_1703 string
+ ,test_hive_1702 string
+ ,test_hive_1705 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1706
+PREHOOK: query: create table if not exists test_hive_1709
+(
+ test_hive_1700 string
+ ,test_hive_1698 string
+ ,test_hive_1701 string
+ ,test_hive_433 string
+ ,test_hive_1699 string
+ ,test_hive_1704 string
+ ,test_hive_1703 string
+ ,test_hive_1702 string
+ ,test_hive_1705 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1709
+POSTHOOK: query: create table if not exists test_hive_1709
+(
+ test_hive_1700 string
+ ,test_hive_1698 string
+ ,test_hive_1701 string
+ ,test_hive_433 string
+ ,test_hive_1699 string
+ ,test_hive_1704 string
+ ,test_hive_1703 string
+ ,test_hive_1702 string
+ ,test_hive_1705 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1709
+PREHOOK: query: drop table if exists test_hive_1708 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1708 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1708
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1708
+POSTHOOK: query: create table if not exists test_hive_1708
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1708
+PREHOOK: query: drop view if exists test_hive_1711
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1711
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1711
+as
+select
+ cast(test_hive_1700 as int) as test_hive_1700
+ ,cast(test_hive_1698 as int) as test_hive_1698
+ ,cast(test_hive_1701 as int) as test_hive_1701
+ ,cast(test_hive_433 as string) as test_hive_433
+ ,cast(test_hive_1699 as string) as test_hive_1699
+ ,cast(test_hive_1704 as string) as test_hive_1704
+ ,cast(test_hive_1703 as string) as test_hive_1703
+ ,cast(test_hive_1702 as string) as test_hive_1702
+ ,cast(from_unixtime(unix_timestamp(test_hive_1705,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1705
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1709
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1709
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1711
+POSTHOOK: query: create view if not exists test_hive_1711
+as
+select
+ cast(test_hive_1700 as int) as test_hive_1700
+ ,cast(test_hive_1698 as int) as test_hive_1698
+ ,cast(test_hive_1701 as int) as test_hive_1701
+ ,cast(test_hive_433 as string) as test_hive_433
+ ,cast(test_hive_1699 as string) as test_hive_1699
+ ,cast(test_hive_1704 as string) as test_hive_1704
+ ,cast(test_hive_1703 as string) as test_hive_1703
+ ,cast(test_hive_1702 as string) as test_hive_1702
+ ,cast(from_unixtime(unix_timestamp(test_hive_1705,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1705
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1709
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1709
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1711
+POSTHOOK: Lineage: test_hive_1711.creation_date EXPRESSION [(test_hive_1709)test_hive_1709.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1711.ds EXPRESSION [(test_hive_1709)test_hive_1709.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1711.ds_ts SIMPLE [(test_hive_1709)test_hive_1709.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1711.source_file_name SIMPLE [(test_hive_1709)test_hive_1709.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1711.test_hive_1698 EXPRESSION [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1698, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1711.test_hive_1699 SIMPLE [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1699, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1711.test_hive_1700 EXPRESSION [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1700, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1711.test_hive_1701 EXPRESSION [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1701, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1711.test_hive_1702 SIMPLE [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1702, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1711.test_hive_1703 SIMPLE [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1703, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1711.test_hive_1704 SIMPLE [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1704, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1711.test_hive_1705 EXPRESSION [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1705, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1711.test_hive_433 SIMPLE [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_433, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1711.ts EXPRESSION [(test_hive_1709)test_hive_1709.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1710
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1710
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1710
+as
+select
+ test_hive_1700 as test_hive_1700
+ ,test_hive_1698 as test_hive_1698
+ ,test_hive_1701 as test_hive_1701
+ ,test_hive_433 as test_hive_433
+ ,test_hive_1699 as test_hive_1699
+ ,test_hive_1704 as test_hive_1704
+ ,test_hive_1703 as test_hive_1703
+ ,test_hive_1702 as test_hive_1702
+ ,test_hive_1705 as test_hive_1705
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1711 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1709
+PREHOOK: Input: default@test_hive_1711
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1710
+POSTHOOK: query: create view test_hive_1710
+as
+select
+ test_hive_1700 as test_hive_1700
+ ,test_hive_1698 as test_hive_1698
+ ,test_hive_1701 as test_hive_1701
+ ,test_hive_433 as test_hive_433
+ ,test_hive_1699 as test_hive_1699
+ ,test_hive_1704 as test_hive_1704
+ ,test_hive_1703 as test_hive_1703
+ ,test_hive_1702 as test_hive_1702
+ ,test_hive_1705 as test_hive_1705
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1711 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1709
+POSTHOOK: Input: default@test_hive_1711
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1710
+POSTHOOK: Lineage: test_hive_1710.creation_date EXPRESSION [(test_hive_1709)test_hive_1709.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1710.ds EXPRESSION [(test_hive_1709)test_hive_1709.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1710.ds_ts SIMPLE [(test_hive_1709)test_hive_1709.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1710.source_file_name SIMPLE [(test_hive_1709)test_hive_1709.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1710.test_hive_1698 EXPRESSION [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1698, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1710.test_hive_1699 SIMPLE [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1699, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1710.test_hive_1700 EXPRESSION [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1700, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1710.test_hive_1701 EXPRESSION [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1701, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1710.test_hive_1702 SIMPLE [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1702, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1710.test_hive_1703 SIMPLE [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1703, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1710.test_hive_1704 SIMPLE [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1704, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1710.test_hive_1705 EXPRESSION [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1705, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1710.test_hive_433 SIMPLE [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_433, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1710.ts EXPRESSION [(test_hive_1709)test_hive_1709.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1707
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1707
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1707
+as
+select t1.*
+from test_hive_1710 t1
+inner join test_hive_1708 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1708
+PREHOOK: Input: default@test_hive_1709
+PREHOOK: Input: default@test_hive_1710
+PREHOOK: Input: default@test_hive_1711
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1707
+POSTHOOK: query: create view test_hive_1707
+as
+select t1.*
+from test_hive_1710 t1
+inner join test_hive_1708 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1708
+POSTHOOK: Input: default@test_hive_1709
+POSTHOOK: Input: default@test_hive_1710
+POSTHOOK: Input: default@test_hive_1711
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1707
+POSTHOOK: Lineage: test_hive_1707.creation_date EXPRESSION [(test_hive_1709)test_hive_1709.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1707.ds EXPRESSION [(test_hive_1709)test_hive_1709.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1707.ds_ts SIMPLE [(test_hive_1709)test_hive_1709.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1707.source_file_name SIMPLE [(test_hive_1709)test_hive_1709.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1707.test_hive_1698 EXPRESSION [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1698, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1707.test_hive_1699 SIMPLE [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1699, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1707.test_hive_1700 EXPRESSION [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1700, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1707.test_hive_1701 EXPRESSION [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1701, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1707.test_hive_1702 SIMPLE [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1702, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1707.test_hive_1703 SIMPLE [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1703, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1707.test_hive_1704 SIMPLE [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1704, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1707.test_hive_1705 EXPRESSION [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_1705, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1707.test_hive_433 SIMPLE [(test_hive_1709)test_hive_1709.FieldSchema(name:test_hive_433, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1707.ts EXPRESSION [(test_hive_1709)test_hive_1709.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1692 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1692 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1692
+(
+ test_hive_1688 string
+ ,test_hive_1686 string
+ ,test_hive_1689 string
+ ,test_hive_432 string
+ ,test_hive_1687 string
+ ,test_hive_1690 string
+ ,test_hive_1691 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1692
+POSTHOOK: query: create table test_hive_1692
+(
+ test_hive_1688 string
+ ,test_hive_1686 string
+ ,test_hive_1689 string
+ ,test_hive_432 string
+ ,test_hive_1687 string
+ ,test_hive_1690 string
+ ,test_hive_1691 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1692 +PREHOOK: query: create table if not exists test_hive_1695 +( + test_hive_1688 string + ,test_hive_1686 string + ,test_hive_1689 string + ,test_hive_432 string + ,test_hive_1687 string + ,test_hive_1690 string + ,test_hive_1691 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1695 +POSTHOOK: query: create table if not exists test_hive_1695 +( + test_hive_1688 string + ,test_hive_1686 string + ,test_hive_1689 string + ,test_hive_432 string + ,test_hive_1687 string + ,test_hive_1690 string + ,test_hive_1691 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1695 +PREHOOK: query: drop table if exists test_hive_1694 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1694 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1694 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1694 +POSTHOOK: query: create table if not exists test_hive_1694 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1694 +PREHOOK: query: drop view if exists test_hive_1697 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1697 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1697 +as +select + cast(test_hive_1688 as int) as test_hive_1688 + ,cast(test_hive_1686 as int) as test_hive_1686 + ,cast(test_hive_1689 as int) as test_hive_1689 + ,cast(test_hive_432 as string) as test_hive_432 + ,cast(test_hive_1687 as string) as test_hive_1687 + ,cast(test_hive_1690 as string) as test_hive_1690 + ,cast(from_unixtime(unix_timestamp(test_hive_1691,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1691 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1695 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1695 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1697 +POSTHOOK: query: create view if not exists test_hive_1697 +as +select + cast(test_hive_1688 as int) as test_hive_1688 + ,cast(test_hive_1686 as int) as test_hive_1686 + ,cast(test_hive_1689 as int) as test_hive_1689 + ,cast(test_hive_432 as string) as test_hive_432 + ,cast(test_hive_1687 as string) as test_hive_1687 + ,cast(test_hive_1690 as string) as test_hive_1690 + ,cast(from_unixtime(unix_timestamp(test_hive_1691,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1691 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1695 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1695 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1697 +POSTHOOK: Lineage: test_hive_1697.creation_date EXPRESSION [(test_hive_1695)test_hive_1695.FieldSchema(name:creation_date, type:string, comment:null), ] 
+POSTHOOK: Lineage: test_hive_1697.ds EXPRESSION [(test_hive_1695)test_hive_1695.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1697.ds_ts SIMPLE [(test_hive_1695)test_hive_1695.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1697.source_file_name SIMPLE [(test_hive_1695)test_hive_1695.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1697.test_hive_1686 EXPRESSION [(test_hive_1695)test_hive_1695.FieldSchema(name:test_hive_1686, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1697.test_hive_1687 SIMPLE [(test_hive_1695)test_hive_1695.FieldSchema(name:test_hive_1687, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1697.test_hive_1688 EXPRESSION [(test_hive_1695)test_hive_1695.FieldSchema(name:test_hive_1688, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1697.test_hive_1689 EXPRESSION [(test_hive_1695)test_hive_1695.FieldSchema(name:test_hive_1689, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1697.test_hive_1690 SIMPLE [(test_hive_1695)test_hive_1695.FieldSchema(name:test_hive_1690, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1697.test_hive_1691 EXPRESSION [(test_hive_1695)test_hive_1695.FieldSchema(name:test_hive_1691, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1697.test_hive_432 SIMPLE [(test_hive_1695)test_hive_1695.FieldSchema(name:test_hive_432, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1697.ts EXPRESSION [(test_hive_1695)test_hive_1695.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1696 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1696 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1696 +as +select + test_hive_1688 as test_hive_1688 + ,test_hive_1686 as test_hive_1686 + ,test_hive_1689 as test_hive_1689 + ,test_hive_432 as test_hive_432 + ,test_hive_1687 as test_hive_1687 + ,test_hive_1690 as test_hive_1690 + ,test_hive_1691 as test_hive_1691 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1697 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1695 +PREHOOK: Input: default@test_hive_1697 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1696 +POSTHOOK: query: create view test_hive_1696 +as +select + test_hive_1688 as test_hive_1688 + ,test_hive_1686 as test_hive_1686 + ,test_hive_1689 as test_hive_1689 + ,test_hive_432 as test_hive_432 + ,test_hive_1687 as test_hive_1687 + ,test_hive_1690 as test_hive_1690 + ,test_hive_1691 as test_hive_1691 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1697 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1695 +POSTHOOK: Input: default@test_hive_1697 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1696 +POSTHOOK: Lineage: test_hive_1696.creation_date EXPRESSION [(test_hive_1695)test_hive_1695.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1696.ds EXPRESSION [(test_hive_1695)test_hive_1695.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1696.ds_ts SIMPLE [(test_hive_1695)test_hive_1695.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1696.source_file_name SIMPLE [(test_hive_1695)test_hive_1695.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1696.test_hive_1686 
EXPRESSION [(test_hive_1695)test_hive_1695.FieldSchema(name:test_hive_1686, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1696.test_hive_1687 SIMPLE [(test_hive_1695)test_hive_1695.FieldSchema(name:test_hive_1687, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1696.test_hive_1688 EXPRESSION [(test_hive_1695)test_hive_1695.FieldSchema(name:test_hive_1688, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1696.test_hive_1689 EXPRESSION [(test_hive_1695)test_hive_1695.FieldSchema(name:test_hive_1689, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1696.test_hive_1690 SIMPLE [(test_hive_1695)test_hive_1695.FieldSchema(name:test_hive_1690, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1696.test_hive_1691 EXPRESSION [(test_hive_1695)test_hive_1695.FieldSchema(name:test_hive_1691, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1696.test_hive_432 SIMPLE [(test_hive_1695)test_hive_1695.FieldSchema(name:test_hive_432, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1696.ts EXPRESSION [(test_hive_1695)test_hive_1695.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1693 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1693 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1693 +as +select t1.* +from test_hive_1696 t1 +inner join test_hive_1694 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1694 +PREHOOK: Input: default@test_hive_1695 +PREHOOK: Input: default@test_hive_1696 +PREHOOK: Input: default@test_hive_1697 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1693 +POSTHOOK: query: create view test_hive_1693 +as +select t1.* +from test_hive_1696 t1 +inner join test_hive_1694 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1694 +POSTHOOK: Input: default@test_hive_1695 +POSTHOOK: Input: default@test_hive_1696 +POSTHOOK: Input: default@test_hive_1697 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1693 +POSTHOOK: Lineage: test_hive_1693.creation_date EXPRESSION [(test_hive_1695)test_hive_1695.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1693.ds EXPRESSION [(test_hive_1695)test_hive_1695.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1693.ds_ts SIMPLE [(test_hive_1695)test_hive_1695.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1693.source_file_name SIMPLE [(test_hive_1695)test_hive_1695.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1693.test_hive_1686 EXPRESSION [(test_hive_1695)test_hive_1695.FieldSchema(name:test_hive_1686, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1693.test_hive_1687 SIMPLE [(test_hive_1695)test_hive_1695.FieldSchema(name:test_hive_1687, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1693.test_hive_1688 EXPRESSION [(test_hive_1695)test_hive_1695.FieldSchema(name:test_hive_1688, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1693.test_hive_1689 EXPRESSION [(test_hive_1695)test_hive_1695.FieldSchema(name:test_hive_1689, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1693.test_hive_1690 SIMPLE [(test_hive_1695)test_hive_1695.FieldSchema(name:test_hive_1690, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1693.test_hive_1691 EXPRESSION 
[(test_hive_1695)test_hive_1695.FieldSchema(name:test_hive_1691, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1693.test_hive_432 SIMPLE [(test_hive_1695)test_hive_1695.FieldSchema(name:test_hive_432, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1693.ts EXPRESSION [(test_hive_1695)test_hive_1695.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1680 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1680 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1680 +( + test_hive_1676 string + ,test_hive_1674 string + ,test_hive_1677 string + ,test_hive_431 string + ,test_hive_1675 string + ,test_hive_1678 string + ,test_hive_1679 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1680 +POSTHOOK: query: create table test_hive_1680 +( + test_hive_1676 string + ,test_hive_1674 string + ,test_hive_1677 string + ,test_hive_431 string + ,test_hive_1675 string + ,test_hive_1678 string + ,test_hive_1679 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1680 +PREHOOK: query: create table if not exists test_hive_1683 +( + test_hive_1676 string + ,test_hive_1674 string + ,test_hive_1677 string + ,test_hive_431 string + ,test_hive_1675 string + ,test_hive_1678 string + ,test_hive_1679 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1683 +POSTHOOK: query: create table if not exists test_hive_1683 +( + test_hive_1676 string + ,test_hive_1674 string + ,test_hive_1677 string + ,test_hive_431 string + ,test_hive_1675 string + ,test_hive_1678 string + ,test_hive_1679 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1683 +PREHOOK: query: drop table if exists test_hive_1682 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1682 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1682 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1682 +POSTHOOK: query: create table if not exists test_hive_1682 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1682 +PREHOOK: query: drop view if exists test_hive_1685 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1685 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1685 +as +select + cast(test_hive_1676 as int) as test_hive_1676 + ,cast(test_hive_1674 as int) as test_hive_1674 + ,cast(test_hive_1677 as int) as test_hive_1677 + ,cast(test_hive_431 as string) as test_hive_431 + ,cast(test_hive_1675 as string) as test_hive_1675 + ,cast(test_hive_1678 as string) as test_hive_1678 + ,cast(from_unixtime(unix_timestamp(test_hive_1679,'yyyymmddhhmmss'), 
'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1679 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1683 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1683 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1685 +POSTHOOK: query: create view if not exists test_hive_1685 +as +select + cast(test_hive_1676 as int) as test_hive_1676 + ,cast(test_hive_1674 as int) as test_hive_1674 + ,cast(test_hive_1677 as int) as test_hive_1677 + ,cast(test_hive_431 as string) as test_hive_431 + ,cast(test_hive_1675 as string) as test_hive_1675 + ,cast(test_hive_1678 as string) as test_hive_1678 + ,cast(from_unixtime(unix_timestamp(test_hive_1679,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1679 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1683 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1683 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1685 +POSTHOOK: Lineage: test_hive_1685.creation_date EXPRESSION [(test_hive_1683)test_hive_1683.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1685.ds EXPRESSION [(test_hive_1683)test_hive_1683.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1685.ds_ts SIMPLE [(test_hive_1683)test_hive_1683.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1685.source_file_name SIMPLE [(test_hive_1683)test_hive_1683.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1685.test_hive_1674 EXPRESSION [(test_hive_1683)test_hive_1683.FieldSchema(name:test_hive_1674, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1685.test_hive_1675 SIMPLE [(test_hive_1683)test_hive_1683.FieldSchema(name:test_hive_1675, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1685.test_hive_1676 EXPRESSION [(test_hive_1683)test_hive_1683.FieldSchema(name:test_hive_1676, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1685.test_hive_1677 EXPRESSION [(test_hive_1683)test_hive_1683.FieldSchema(name:test_hive_1677, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1685.test_hive_1678 SIMPLE [(test_hive_1683)test_hive_1683.FieldSchema(name:test_hive_1678, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1685.test_hive_1679 EXPRESSION [(test_hive_1683)test_hive_1683.FieldSchema(name:test_hive_1679, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1685.test_hive_431 SIMPLE [(test_hive_1683)test_hive_1683.FieldSchema(name:test_hive_431, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1685.ts EXPRESSION [(test_hive_1683)test_hive_1683.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1684 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1684 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1684 +as +select + test_hive_1676 as test_hive_1676 + ,test_hive_1674 as test_hive_1674 + ,test_hive_1677 as test_hive_1677 + ,test_hive_431 as test_hive_431 + ,test_hive_1675 as test_hive_1675 + ,test_hive_1678 as test_hive_1678 + ,test_hive_1679 as test_hive_1679 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1685 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1683 
+PREHOOK: Input: default@test_hive_1685 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1684 +POSTHOOK: query: create view test_hive_1684 +as +select + test_hive_1676 as test_hive_1676 + ,test_hive_1674 as test_hive_1674 + ,test_hive_1677 as test_hive_1677 + ,test_hive_431 as test_hive_431 + ,test_hive_1675 as test_hive_1675 + ,test_hive_1678 as test_hive_1678 + ,test_hive_1679 as test_hive_1679 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1685 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1683 +POSTHOOK: Input: default@test_hive_1685 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1684 +POSTHOOK: Lineage: test_hive_1684.creation_date EXPRESSION [(test_hive_1683)test_hive_1683.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1684.ds EXPRESSION [(test_hive_1683)test_hive_1683.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1684.ds_ts SIMPLE [(test_hive_1683)test_hive_1683.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1684.source_file_name SIMPLE [(test_hive_1683)test_hive_1683.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1684.test_hive_1674 EXPRESSION [(test_hive_1683)test_hive_1683.FieldSchema(name:test_hive_1674, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1684.test_hive_1675 SIMPLE [(test_hive_1683)test_hive_1683.FieldSchema(name:test_hive_1675, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1684.test_hive_1676 EXPRESSION [(test_hive_1683)test_hive_1683.FieldSchema(name:test_hive_1676, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1684.test_hive_1677 EXPRESSION [(test_hive_1683)test_hive_1683.FieldSchema(name:test_hive_1677, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1684.test_hive_1678 SIMPLE [(test_hive_1683)test_hive_1683.FieldSchema(name:test_hive_1678, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1684.test_hive_1679 EXPRESSION [(test_hive_1683)test_hive_1683.FieldSchema(name:test_hive_1679, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1684.test_hive_431 SIMPLE [(test_hive_1683)test_hive_1683.FieldSchema(name:test_hive_431, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1684.ts EXPRESSION [(test_hive_1683)test_hive_1683.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1681 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1681 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1681 +as +select t1.* +from test_hive_1684 t1 +inner join test_hive_1682 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1682 +PREHOOK: Input: default@test_hive_1683 +PREHOOK: Input: default@test_hive_1684 +PREHOOK: Input: default@test_hive_1685 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1681 +POSTHOOK: query: create view test_hive_1681 +as +select t1.* +from test_hive_1684 t1 +inner join test_hive_1682 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1682 +POSTHOOK: Input: default@test_hive_1683 +POSTHOOK: Input: default@test_hive_1684 +POSTHOOK: Input: default@test_hive_1685 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1681 +POSTHOOK: Lineage: test_hive_1681.creation_date EXPRESSION 
[(test_hive_1683)test_hive_1683.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1681.ds EXPRESSION [(test_hive_1683)test_hive_1683.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1681.ds_ts SIMPLE [(test_hive_1683)test_hive_1683.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1681.source_file_name SIMPLE [(test_hive_1683)test_hive_1683.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1681.test_hive_1674 EXPRESSION [(test_hive_1683)test_hive_1683.FieldSchema(name:test_hive_1674, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1681.test_hive_1675 SIMPLE [(test_hive_1683)test_hive_1683.FieldSchema(name:test_hive_1675, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1681.test_hive_1676 EXPRESSION [(test_hive_1683)test_hive_1683.FieldSchema(name:test_hive_1676, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1681.test_hive_1677 EXPRESSION [(test_hive_1683)test_hive_1683.FieldSchema(name:test_hive_1677, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1681.test_hive_1678 SIMPLE [(test_hive_1683)test_hive_1683.FieldSchema(name:test_hive_1678, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1681.test_hive_1679 EXPRESSION [(test_hive_1683)test_hive_1683.FieldSchema(name:test_hive_1679, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1681.test_hive_431 SIMPLE [(test_hive_1683)test_hive_1683.FieldSchema(name:test_hive_431, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1681.ts EXPRESSION [(test_hive_1683)test_hive_1683.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1668 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1668 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1668 +( + test_hive_1662 string + ,test_hive_1660 string + ,test_hive_1663 string + ,test_hive_430 string + ,test_hive_1661 string + ,test_hive_1666 string + ,test_hive_1665 string + ,test_hive_1664 string + ,test_hive_1667 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1668 +POSTHOOK: query: create table test_hive_1668 +( + test_hive_1662 string + ,test_hive_1660 string + ,test_hive_1663 string + ,test_hive_430 string + ,test_hive_1661 string + ,test_hive_1666 string + ,test_hive_1665 string + ,test_hive_1664 string + ,test_hive_1667 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1668 +PREHOOK: query: create table if not exists test_hive_1671 +( + test_hive_1662 string + ,test_hive_1660 string + ,test_hive_1663 string + ,test_hive_430 string + ,test_hive_1661 string + ,test_hive_1666 string + ,test_hive_1665 string + ,test_hive_1664 string + ,test_hive_1667 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1671 +POSTHOOK: query: create table if not exists test_hive_1671 +( + test_hive_1662 string + ,test_hive_1660 string + ,test_hive_1663 
string + ,test_hive_430 string + ,test_hive_1661 string + ,test_hive_1666 string + ,test_hive_1665 string + ,test_hive_1664 string + ,test_hive_1667 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1671 +PREHOOK: query: drop table if exists test_hive_1670 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1670 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1670 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1670 +POSTHOOK: query: create table if not exists test_hive_1670 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1670 +PREHOOK: query: drop view if exists test_hive_1673 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1673 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1673 +as +select + cast(test_hive_1662 as int) as test_hive_1662 + ,cast(test_hive_1660 as int) as test_hive_1660 + ,cast(test_hive_1663 as int) as test_hive_1663 + ,cast(test_hive_430 as string) as test_hive_430 + ,cast(test_hive_1661 as string) as test_hive_1661 + ,cast(test_hive_1666 as string) as test_hive_1666 + ,cast(test_hive_1665 as string) as test_hive_1665 + ,cast(test_hive_1664 as string) as test_hive_1664 + ,cast(from_unixtime(unix_timestamp(test_hive_1667,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1667 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1671 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1671 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1673 +POSTHOOK: query: create view if not exists test_hive_1673 +as +select + cast(test_hive_1662 as int) as test_hive_1662 + ,cast(test_hive_1660 as int) as test_hive_1660 + ,cast(test_hive_1663 as int) as test_hive_1663 + ,cast(test_hive_430 as string) as test_hive_430 + ,cast(test_hive_1661 as string) as test_hive_1661 + ,cast(test_hive_1666 as string) as test_hive_1666 + ,cast(test_hive_1665 as string) as test_hive_1665 + ,cast(test_hive_1664 as string) as test_hive_1664 + ,cast(from_unixtime(unix_timestamp(test_hive_1667,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1667 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1671 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1671 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1673 +POSTHOOK: Lineage: test_hive_1673.creation_date EXPRESSION [(test_hive_1671)test_hive_1671.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1673.ds EXPRESSION [(test_hive_1671)test_hive_1671.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1673.ds_ts SIMPLE [(test_hive_1671)test_hive_1671.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1673.source_file_name SIMPLE [(test_hive_1671)test_hive_1671.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1673.test_hive_1660 EXPRESSION 
[(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1660, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1673.test_hive_1661 SIMPLE [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1661, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1673.test_hive_1662 EXPRESSION [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1662, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1673.test_hive_1663 EXPRESSION [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1663, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1673.test_hive_1664 SIMPLE [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1664, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1673.test_hive_1665 SIMPLE [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1665, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1673.test_hive_1666 SIMPLE [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1666, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1673.test_hive_1667 EXPRESSION [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1667, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1673.test_hive_430 SIMPLE [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_430, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1673.ts EXPRESSION [(test_hive_1671)test_hive_1671.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1672 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1672 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1672 +as +select + test_hive_1662 as test_hive_1662 + ,test_hive_1660 as test_hive_1660 + ,test_hive_1663 as test_hive_1663 + ,test_hive_430 as test_hive_430 + ,test_hive_1661 as test_hive_1661 + ,test_hive_1666 as test_hive_1666 + ,test_hive_1665 as test_hive_1665 + ,test_hive_1664 as test_hive_1664 + ,test_hive_1667 as test_hive_1667 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1673 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1671 +PREHOOK: Input: default@test_hive_1673 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1672 +POSTHOOK: query: create view test_hive_1672 +as +select + test_hive_1662 as test_hive_1662 + ,test_hive_1660 as test_hive_1660 + ,test_hive_1663 as test_hive_1663 + ,test_hive_430 as test_hive_430 + ,test_hive_1661 as test_hive_1661 + ,test_hive_1666 as test_hive_1666 + ,test_hive_1665 as test_hive_1665 + ,test_hive_1664 as test_hive_1664 + ,test_hive_1667 as test_hive_1667 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1673 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1671 +POSTHOOK: Input: default@test_hive_1673 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1672 +POSTHOOK: Lineage: test_hive_1672.creation_date EXPRESSION [(test_hive_1671)test_hive_1671.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1672.ds EXPRESSION [(test_hive_1671)test_hive_1671.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1672.ds_ts SIMPLE [(test_hive_1671)test_hive_1671.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1672.source_file_name SIMPLE [(test_hive_1671)test_hive_1671.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1672.test_hive_1660 EXPRESSION 
[(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1660, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1672.test_hive_1661 SIMPLE [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1661, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1672.test_hive_1662 EXPRESSION [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1662, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1672.test_hive_1663 EXPRESSION [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1663, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1672.test_hive_1664 SIMPLE [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1664, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1672.test_hive_1665 SIMPLE [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1665, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1672.test_hive_1666 SIMPLE [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1666, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1672.test_hive_1667 EXPRESSION [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1667, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1672.test_hive_430 SIMPLE [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_430, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1672.ts EXPRESSION [(test_hive_1671)test_hive_1671.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1669 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1669 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1669 +as +select t1.* +from test_hive_1672 t1 +inner join test_hive_1670 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1670 +PREHOOK: Input: default@test_hive_1671 +PREHOOK: Input: default@test_hive_1672 +PREHOOK: Input: default@test_hive_1673 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1669 +POSTHOOK: query: create view test_hive_1669 +as +select t1.* +from test_hive_1672 t1 +inner join test_hive_1670 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1670 +POSTHOOK: Input: default@test_hive_1671 +POSTHOOK: Input: default@test_hive_1672 +POSTHOOK: Input: default@test_hive_1673 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1669 +POSTHOOK: Lineage: test_hive_1669.creation_date EXPRESSION [(test_hive_1671)test_hive_1671.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1669.ds EXPRESSION [(test_hive_1671)test_hive_1671.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1669.ds_ts SIMPLE [(test_hive_1671)test_hive_1671.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1669.source_file_name SIMPLE [(test_hive_1671)test_hive_1671.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1669.test_hive_1660 EXPRESSION [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1660, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1669.test_hive_1661 SIMPLE [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1661, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1669.test_hive_1662 EXPRESSION [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1662, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1669.test_hive_1663 EXPRESSION 
[(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1663, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1669.test_hive_1664 SIMPLE [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1664, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1669.test_hive_1665 SIMPLE [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1665, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1669.test_hive_1666 SIMPLE [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1666, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1669.test_hive_1667 EXPRESSION [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_1667, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1669.test_hive_430 SIMPLE [(test_hive_1671)test_hive_1671.FieldSchema(name:test_hive_430, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1669.ts EXPRESSION [(test_hive_1671)test_hive_1671.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1654 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1654 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1654 +( + test_hive_1650 string + ,test_hive_1648 string + ,test_hive_1651 string + ,test_hive_429 string + ,test_hive_1649 string + ,test_hive_1652 string + ,test_hive_1653 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1654 +POSTHOOK: query: create table test_hive_1654 +( + test_hive_1650 string + ,test_hive_1648 string + ,test_hive_1651 string + ,test_hive_429 string + ,test_hive_1649 string + ,test_hive_1652 string + ,test_hive_1653 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1654 +PREHOOK: query: create table if not exists test_hive_1657 +( + test_hive_1650 string + ,test_hive_1648 string + ,test_hive_1651 string + ,test_hive_429 string + ,test_hive_1649 string + ,test_hive_1652 string + ,test_hive_1653 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1657 +POSTHOOK: query: create table if not exists test_hive_1657 +( + test_hive_1650 string + ,test_hive_1648 string + ,test_hive_1651 string + ,test_hive_429 string + ,test_hive_1649 string + ,test_hive_1652 string + ,test_hive_1653 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1657 +PREHOOK: query: drop table if exists test_hive_1656 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1656 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1656 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1656 +POSTHOOK: query: create table if not exists test_hive_1656 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: 
default@test_hive_1656 +PREHOOK: query: drop view if exists test_hive_1659 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1659 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1659 +as +select + cast(test_hive_1650 as int) as test_hive_1650 + ,cast(test_hive_1648 as int) as test_hive_1648 + ,cast(test_hive_1651 as int) as test_hive_1651 + ,cast(test_hive_429 as string) as test_hive_429 + ,cast(test_hive_1649 as string) as test_hive_1649 + ,cast(test_hive_1652 as string) as test_hive_1652 + ,cast(from_unixtime(unix_timestamp(test_hive_1653,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1653 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1657 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1657 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1659 +POSTHOOK: query: create view if not exists test_hive_1659 +as +select + cast(test_hive_1650 as int) as test_hive_1650 + ,cast(test_hive_1648 as int) as test_hive_1648 + ,cast(test_hive_1651 as int) as test_hive_1651 + ,cast(test_hive_429 as string) as test_hive_429 + ,cast(test_hive_1649 as string) as test_hive_1649 + ,cast(test_hive_1652 as string) as test_hive_1652 + ,cast(from_unixtime(unix_timestamp(test_hive_1653,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1653 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1657 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1657 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1659 +POSTHOOK: Lineage: test_hive_1659.creation_date EXPRESSION [(test_hive_1657)test_hive_1657.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1659.ds EXPRESSION [(test_hive_1657)test_hive_1657.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1659.ds_ts SIMPLE [(test_hive_1657)test_hive_1657.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1659.source_file_name SIMPLE [(test_hive_1657)test_hive_1657.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1659.test_hive_1648 EXPRESSION [(test_hive_1657)test_hive_1657.FieldSchema(name:test_hive_1648, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1659.test_hive_1649 SIMPLE [(test_hive_1657)test_hive_1657.FieldSchema(name:test_hive_1649, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1659.test_hive_1650 EXPRESSION [(test_hive_1657)test_hive_1657.FieldSchema(name:test_hive_1650, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1659.test_hive_1651 EXPRESSION [(test_hive_1657)test_hive_1657.FieldSchema(name:test_hive_1651, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1659.test_hive_1652 SIMPLE [(test_hive_1657)test_hive_1657.FieldSchema(name:test_hive_1652, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1659.test_hive_1653 EXPRESSION [(test_hive_1657)test_hive_1657.FieldSchema(name:test_hive_1653, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1659.test_hive_429 SIMPLE [(test_hive_1657)test_hive_1657.FieldSchema(name:test_hive_429, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1659.ts EXPRESSION [(test_hive_1657)test_hive_1657.FieldSchema(name:ts, type:int, 
comment:null), ] +PREHOOK: query: drop view if exists test_hive_1658 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1658 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1658 +as +select + test_hive_1650 as test_hive_1650 + ,test_hive_1648 as test_hive_1648 + ,test_hive_1651 as test_hive_1651 + ,test_hive_429 as test_hive_429 + ,test_hive_1649 as test_hive_1649 + ,test_hive_1652 as test_hive_1652 + ,test_hive_1653 as test_hive_1653 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1659 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1657 +PREHOOK: Input: default@test_hive_1659 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1658 +POSTHOOK: query: create view test_hive_1658 +as +select + test_hive_1650 as test_hive_1650 + ,test_hive_1648 as test_hive_1648 + ,test_hive_1651 as test_hive_1651 + ,test_hive_429 as test_hive_429 + ,test_hive_1649 as test_hive_1649 + ,test_hive_1652 as test_hive_1652 + ,test_hive_1653 as test_hive_1653 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1659 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1657 +POSTHOOK: Input: default@test_hive_1659 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1658 +POSTHOOK: Lineage: test_hive_1658.creation_date EXPRESSION [(test_hive_1657)test_hive_1657.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1658.ds EXPRESSION [(test_hive_1657)test_hive_1657.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1658.ds_ts SIMPLE [(test_hive_1657)test_hive_1657.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1658.source_file_name SIMPLE [(test_hive_1657)test_hive_1657.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1658.test_hive_1648 EXPRESSION [(test_hive_1657)test_hive_1657.FieldSchema(name:test_hive_1648, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1658.test_hive_1649 SIMPLE [(test_hive_1657)test_hive_1657.FieldSchema(name:test_hive_1649, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1658.test_hive_1650 EXPRESSION [(test_hive_1657)test_hive_1657.FieldSchema(name:test_hive_1650, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1658.test_hive_1651 EXPRESSION [(test_hive_1657)test_hive_1657.FieldSchema(name:test_hive_1651, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1658.test_hive_1652 SIMPLE [(test_hive_1657)test_hive_1657.FieldSchema(name:test_hive_1652, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1658.test_hive_1653 EXPRESSION [(test_hive_1657)test_hive_1657.FieldSchema(name:test_hive_1653, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1658.test_hive_429 SIMPLE [(test_hive_1657)test_hive_1657.FieldSchema(name:test_hive_429, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1658.ts EXPRESSION [(test_hive_1657)test_hive_1657.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1655 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1655 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1655 +as +select t1.* +from test_hive_1658 t1 +inner join test_hive_1656 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1656 +PREHOOK: Input: default@test_hive_1657 +PREHOOK: Input: default@test_hive_1658 
+PREHOOK: Input: default@test_hive_1659 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1655 +POSTHOOK: query: create view test_hive_1655 +as +select t1.* +from test_hive_1658 t1 +inner join test_hive_1656 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1656 +POSTHOOK: Input: default@test_hive_1657 +POSTHOOK: Input: default@test_hive_1658 +POSTHOOK: Input: default@test_hive_1659 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1655 +POSTHOOK: Lineage: test_hive_1655.creation_date EXPRESSION [(test_hive_1657)test_hive_1657.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1655.ds EXPRESSION [(test_hive_1657)test_hive_1657.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1655.ds_ts SIMPLE [(test_hive_1657)test_hive_1657.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1655.source_file_name SIMPLE [(test_hive_1657)test_hive_1657.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1655.test_hive_1648 EXPRESSION [(test_hive_1657)test_hive_1657.FieldSchema(name:test_hive_1648, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1655.test_hive_1649 SIMPLE [(test_hive_1657)test_hive_1657.FieldSchema(name:test_hive_1649, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1655.test_hive_1650 EXPRESSION [(test_hive_1657)test_hive_1657.FieldSchema(name:test_hive_1650, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1655.test_hive_1651 EXPRESSION [(test_hive_1657)test_hive_1657.FieldSchema(name:test_hive_1651, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1655.test_hive_1652 SIMPLE [(test_hive_1657)test_hive_1657.FieldSchema(name:test_hive_1652, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1655.test_hive_1653 EXPRESSION [(test_hive_1657)test_hive_1657.FieldSchema(name:test_hive_1653, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1655.test_hive_429 SIMPLE [(test_hive_1657)test_hive_1657.FieldSchema(name:test_hive_429, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1655.ts EXPRESSION [(test_hive_1657)test_hive_1657.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1642 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1642 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1642 +( + test_hive_1636 string + ,test_hive_1634 string + ,test_hive_1637 string + ,test_hive_428 string + ,test_hive_1635 string + ,test_hive_1640 string + ,test_hive_1639 string + ,test_hive_1638 string + ,test_hive_1641 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1642 +POSTHOOK: query: create table test_hive_1642 +( + test_hive_1636 string + ,test_hive_1634 string + ,test_hive_1637 string + ,test_hive_428 string + ,test_hive_1635 string + ,test_hive_1640 string + ,test_hive_1639 string + ,test_hive_1638 string + ,test_hive_1641 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1642 +PREHOOK: query: create table if not exists 
test_hive_1645 +( + test_hive_1636 string + ,test_hive_1634 string + ,test_hive_1637 string + ,test_hive_428 string + ,test_hive_1635 string + ,test_hive_1640 string + ,test_hive_1639 string + ,test_hive_1638 string + ,test_hive_1641 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1645 +POSTHOOK: query: create table if not exists test_hive_1645 +( + test_hive_1636 string + ,test_hive_1634 string + ,test_hive_1637 string + ,test_hive_428 string + ,test_hive_1635 string + ,test_hive_1640 string + ,test_hive_1639 string + ,test_hive_1638 string + ,test_hive_1641 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1645 +PREHOOK: query: drop table if exists test_hive_1644 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1644 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1644 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1644 +POSTHOOK: query: create table if not exists test_hive_1644 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1644 +PREHOOK: query: drop view if exists test_hive_1647 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1647 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1647 +as +select + cast(test_hive_1636 as int) as test_hive_1636 + ,cast(test_hive_1634 as int) as test_hive_1634 + ,cast(test_hive_1637 as int) as test_hive_1637 + ,cast(test_hive_428 as string) as test_hive_428 + ,cast(test_hive_1635 as string) as test_hive_1635 + ,cast(test_hive_1640 as string) as test_hive_1640 + ,cast(test_hive_1639 as string) as test_hive_1639 + ,cast(test_hive_1638 as string) as test_hive_1638 + ,cast(from_unixtime(unix_timestamp(test_hive_1641,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1641 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1645 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1645 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1647 +POSTHOOK: query: create view if not exists test_hive_1647 +as +select + cast(test_hive_1636 as int) as test_hive_1636 + ,cast(test_hive_1634 as int) as test_hive_1634 + ,cast(test_hive_1637 as int) as test_hive_1637 + ,cast(test_hive_428 as string) as test_hive_428 + ,cast(test_hive_1635 as string) as test_hive_1635 + ,cast(test_hive_1640 as string) as test_hive_1640 + ,cast(test_hive_1639 as string) as test_hive_1639 + ,cast(test_hive_1638 as string) as test_hive_1638 + ,cast(from_unixtime(unix_timestamp(test_hive_1641,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1641 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1645 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1645 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1647 +POSTHOOK: Lineage: 
test_hive_1647.creation_date EXPRESSION [(test_hive_1645)test_hive_1645.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1647.ds EXPRESSION [(test_hive_1645)test_hive_1645.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1647.ds_ts SIMPLE [(test_hive_1645)test_hive_1645.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1647.source_file_name SIMPLE [(test_hive_1645)test_hive_1645.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1647.test_hive_1634 EXPRESSION [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1634, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1647.test_hive_1635 SIMPLE [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1635, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1647.test_hive_1636 EXPRESSION [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1636, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1647.test_hive_1637 EXPRESSION [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1637, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1647.test_hive_1638 SIMPLE [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1638, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1647.test_hive_1639 SIMPLE [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1639, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1647.test_hive_1640 SIMPLE [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1640, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1647.test_hive_1641 EXPRESSION [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1641, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1647.test_hive_428 SIMPLE [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_428, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1647.ts EXPRESSION [(test_hive_1645)test_hive_1645.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1646 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1646 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1646 +as +select + test_hive_1636 as test_hive_1636 + ,test_hive_1634 as test_hive_1634 + ,test_hive_1637 as test_hive_1637 + ,test_hive_428 as test_hive_428 + ,test_hive_1635 as test_hive_1635 + ,test_hive_1640 as test_hive_1640 + ,test_hive_1639 as test_hive_1639 + ,test_hive_1638 as test_hive_1638 + ,test_hive_1641 as test_hive_1641 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1647 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1645 +PREHOOK: Input: default@test_hive_1647 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1646 +POSTHOOK: query: create view test_hive_1646 +as +select + test_hive_1636 as test_hive_1636 + ,test_hive_1634 as test_hive_1634 + ,test_hive_1637 as test_hive_1637 + ,test_hive_428 as test_hive_428 + ,test_hive_1635 as test_hive_1635 + ,test_hive_1640 as test_hive_1640 + ,test_hive_1639 as test_hive_1639 + ,test_hive_1638 as test_hive_1638 + ,test_hive_1641 as test_hive_1641 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1647 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1645 +POSTHOOK: Input: default@test_hive_1647 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1646 +POSTHOOK: Lineage: 
test_hive_1646.creation_date EXPRESSION [(test_hive_1645)test_hive_1645.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1646.ds EXPRESSION [(test_hive_1645)test_hive_1645.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1646.ds_ts SIMPLE [(test_hive_1645)test_hive_1645.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1646.source_file_name SIMPLE [(test_hive_1645)test_hive_1645.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1646.test_hive_1634 EXPRESSION [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1634, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1646.test_hive_1635 SIMPLE [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1635, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1646.test_hive_1636 EXPRESSION [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1636, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1646.test_hive_1637 EXPRESSION [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1637, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1646.test_hive_1638 SIMPLE [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1638, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1646.test_hive_1639 SIMPLE [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1639, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1646.test_hive_1640 SIMPLE [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1640, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1646.test_hive_1641 EXPRESSION [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1641, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1646.test_hive_428 SIMPLE [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_428, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1646.ts EXPRESSION [(test_hive_1645)test_hive_1645.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1643 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1643 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1643 +as +select t1.* +from test_hive_1646 t1 +inner join test_hive_1644 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1644 +PREHOOK: Input: default@test_hive_1645 +PREHOOK: Input: default@test_hive_1646 +PREHOOK: Input: default@test_hive_1647 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1643 +POSTHOOK: query: create view test_hive_1643 +as +select t1.* +from test_hive_1646 t1 +inner join test_hive_1644 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1644 +POSTHOOK: Input: default@test_hive_1645 +POSTHOOK: Input: default@test_hive_1646 +POSTHOOK: Input: default@test_hive_1647 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1643 +POSTHOOK: Lineage: test_hive_1643.creation_date EXPRESSION [(test_hive_1645)test_hive_1645.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1643.ds EXPRESSION [(test_hive_1645)test_hive_1645.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1643.ds_ts SIMPLE [(test_hive_1645)test_hive_1645.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1643.source_file_name SIMPLE 
[(test_hive_1645)test_hive_1645.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1643.test_hive_1634 EXPRESSION [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1634, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1643.test_hive_1635 SIMPLE [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1635, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1643.test_hive_1636 EXPRESSION [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1636, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1643.test_hive_1637 EXPRESSION [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1637, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1643.test_hive_1638 SIMPLE [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1638, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1643.test_hive_1639 SIMPLE [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1639, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1643.test_hive_1640 SIMPLE [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1640, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1643.test_hive_1641 EXPRESSION [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_1641, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1643.test_hive_428 SIMPLE [(test_hive_1645)test_hive_1645.FieldSchema(name:test_hive_428, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1643.ts EXPRESSION [(test_hive_1645)test_hive_1645.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1628 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1628 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1628 +( + test_hive_1622 string + ,test_hive_1620 string + ,test_hive_1623 string + ,test_hive_427 string + ,test_hive_1621 string + ,test_hive_1626 string + ,test_hive_1625 string + ,test_hive_1624 string + ,test_hive_1627 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1628 +POSTHOOK: query: create table test_hive_1628 +( + test_hive_1622 string + ,test_hive_1620 string + ,test_hive_1623 string + ,test_hive_427 string + ,test_hive_1621 string + ,test_hive_1626 string + ,test_hive_1625 string + ,test_hive_1624 string + ,test_hive_1627 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1628 +PREHOOK: query: create table if not exists test_hive_1631 +( + test_hive_1622 string + ,test_hive_1620 string + ,test_hive_1623 string + ,test_hive_427 string + ,test_hive_1621 string + ,test_hive_1626 string + ,test_hive_1625 string + ,test_hive_1624 string + ,test_hive_1627 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1631 +POSTHOOK: query: create table if not exists test_hive_1631 +( + test_hive_1622 string + ,test_hive_1620 string + ,test_hive_1623 string + ,test_hive_427 string + ,test_hive_1621 string + ,test_hive_1626 string + ,test_hive_1625 string + 
,test_hive_1624 string + ,test_hive_1627 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1631 +PREHOOK: query: drop table if exists test_hive_1630 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1630 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1630 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1630 +POSTHOOK: query: create table if not exists test_hive_1630 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1630 +PREHOOK: query: drop view if exists test_hive_1633 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1633 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1633 +as +select + cast(test_hive_1622 as int) as test_hive_1622 + ,cast(test_hive_1620 as int) as test_hive_1620 + ,cast(test_hive_1623 as int) as test_hive_1623 + ,cast(test_hive_427 as string) as test_hive_427 + ,cast(test_hive_1621 as string) as test_hive_1621 + ,cast(test_hive_1626 as string) as test_hive_1626 + ,cast(test_hive_1625 as string) as test_hive_1625 + ,cast(test_hive_1624 as string) as test_hive_1624 + ,cast(from_unixtime(unix_timestamp(test_hive_1627,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1627 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1631 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1631 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1633 +POSTHOOK: query: create view if not exists test_hive_1633 +as +select + cast(test_hive_1622 as int) as test_hive_1622 + ,cast(test_hive_1620 as int) as test_hive_1620 + ,cast(test_hive_1623 as int) as test_hive_1623 + ,cast(test_hive_427 as string) as test_hive_427 + ,cast(test_hive_1621 as string) as test_hive_1621 + ,cast(test_hive_1626 as string) as test_hive_1626 + ,cast(test_hive_1625 as string) as test_hive_1625 + ,cast(test_hive_1624 as string) as test_hive_1624 + ,cast(from_unixtime(unix_timestamp(test_hive_1627,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1627 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1631 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1631 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1633 +POSTHOOK: Lineage: test_hive_1633.creation_date EXPRESSION [(test_hive_1631)test_hive_1631.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1633.ds EXPRESSION [(test_hive_1631)test_hive_1631.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1633.ds_ts SIMPLE [(test_hive_1631)test_hive_1631.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1633.source_file_name SIMPLE [(test_hive_1631)test_hive_1631.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1633.test_hive_1620 EXPRESSION [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1620, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1633.test_hive_1621 SIMPLE [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1621, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1633.test_hive_1622 EXPRESSION [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1622, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1633.test_hive_1623 EXPRESSION [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1623, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1633.test_hive_1624 SIMPLE [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1624, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1633.test_hive_1625 SIMPLE [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1625, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1633.test_hive_1626 SIMPLE [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1626, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1633.test_hive_1627 EXPRESSION [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1627, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1633.test_hive_427 SIMPLE [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_427, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1633.ts EXPRESSION [(test_hive_1631)test_hive_1631.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1632 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1632 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1632 +as +select + test_hive_1622 as test_hive_1622 + ,test_hive_1620 as test_hive_1620 + ,test_hive_1623 as test_hive_1623 + ,test_hive_427 as test_hive_427 + ,test_hive_1621 as test_hive_1621 + ,test_hive_1626 as test_hive_1626 + ,test_hive_1625 as test_hive_1625 + ,test_hive_1624 as test_hive_1624 + ,test_hive_1627 as test_hive_1627 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1633 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1631 +PREHOOK: Input: default@test_hive_1633 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1632 +POSTHOOK: query: create view test_hive_1632 +as +select + test_hive_1622 as test_hive_1622 + ,test_hive_1620 as test_hive_1620 + ,test_hive_1623 as test_hive_1623 + ,test_hive_427 as test_hive_427 + ,test_hive_1621 as test_hive_1621 + ,test_hive_1626 as test_hive_1626 + ,test_hive_1625 as test_hive_1625 + ,test_hive_1624 as test_hive_1624 + ,test_hive_1627 as test_hive_1627 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1633 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1631 +POSTHOOK: Input: default@test_hive_1633 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1632 +POSTHOOK: Lineage: test_hive_1632.creation_date EXPRESSION [(test_hive_1631)test_hive_1631.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1632.ds EXPRESSION [(test_hive_1631)test_hive_1631.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1632.ds_ts SIMPLE [(test_hive_1631)test_hive_1631.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1632.source_file_name SIMPLE [(test_hive_1631)test_hive_1631.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1632.test_hive_1620 EXPRESSION [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1620, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1632.test_hive_1621 SIMPLE [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1621, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1632.test_hive_1622 EXPRESSION [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1622, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1632.test_hive_1623 EXPRESSION [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1623, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1632.test_hive_1624 SIMPLE [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1624, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1632.test_hive_1625 SIMPLE [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1625, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1632.test_hive_1626 SIMPLE [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1626, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1632.test_hive_1627 EXPRESSION [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1627, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1632.test_hive_427 SIMPLE [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_427, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1632.ts EXPRESSION [(test_hive_1631)test_hive_1631.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1629 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1629 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1629 +as +select t1.* +from test_hive_1632 t1 +inner join test_hive_1630 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1630 +PREHOOK: Input: default@test_hive_1631 +PREHOOK: Input: default@test_hive_1632 +PREHOOK: Input: default@test_hive_1633 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1629 +POSTHOOK: query: create view test_hive_1629 +as +select t1.* +from test_hive_1632 t1 +inner join test_hive_1630 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1630 +POSTHOOK: Input: default@test_hive_1631 +POSTHOOK: Input: default@test_hive_1632 +POSTHOOK: Input: default@test_hive_1633 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1629 +POSTHOOK: Lineage: test_hive_1629.creation_date EXPRESSION [(test_hive_1631)test_hive_1631.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1629.ds EXPRESSION [(test_hive_1631)test_hive_1631.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1629.ds_ts SIMPLE [(test_hive_1631)test_hive_1631.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1629.source_file_name SIMPLE [(test_hive_1631)test_hive_1631.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1629.test_hive_1620 EXPRESSION [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1620, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1629.test_hive_1621 SIMPLE [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1621, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1629.test_hive_1622 EXPRESSION [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1622, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1629.test_hive_1623 EXPRESSION [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1623, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1629.test_hive_1624 SIMPLE [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1624, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1629.test_hive_1625 SIMPLE [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1625, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1629.test_hive_1626 SIMPLE [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1626, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1629.test_hive_1627 EXPRESSION [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_1627, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1629.test_hive_427 SIMPLE [(test_hive_1631)test_hive_1631.FieldSchema(name:test_hive_427, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1629.ts EXPRESSION [(test_hive_1631)test_hive_1631.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1614 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1614 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1614 +( + test_hive_1608 string + ,test_hive_1606 string + ,test_hive_1609 string + ,test_hive_426 string + ,test_hive_1607 string + ,test_hive_1612 string + ,test_hive_1611 string + ,test_hive_1610 string + ,test_hive_1613 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1614 +POSTHOOK: query: create table test_hive_1614 +( + test_hive_1608 string + ,test_hive_1606 string + ,test_hive_1609 string + ,test_hive_426 string + ,test_hive_1607 string + ,test_hive_1612 string + ,test_hive_1611 string + ,test_hive_1610 string + ,test_hive_1613 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1614 +PREHOOK: query: create table if not exists test_hive_1617 +( + test_hive_1608 string + ,test_hive_1606 string + ,test_hive_1609 string + ,test_hive_426 string + ,test_hive_1607 string + ,test_hive_1612 string + ,test_hive_1611 string + ,test_hive_1610 string + ,test_hive_1613 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1617 +POSTHOOK: query: create table if not exists test_hive_1617 +( + test_hive_1608 string + ,test_hive_1606 string + ,test_hive_1609 string + ,test_hive_426 string + ,test_hive_1607 string + ,test_hive_1612 string + ,test_hive_1611 string + ,test_hive_1610 string + ,test_hive_1613 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1617 +PREHOOK: query: drop table if exists test_hive_1616 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1616 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1616 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1616 +POSTHOOK: query: create table if not exists test_hive_1616 +( +max_partition bigint +) +POSTHOOK: 
type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1616 +PREHOOK: query: drop view if exists test_hive_1619 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1619 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1619 +as +select + cast(test_hive_1608 as int) as test_hive_1608 + ,cast(test_hive_1606 as int) as test_hive_1606 + ,cast(test_hive_1609 as int) as test_hive_1609 + ,cast(test_hive_426 as string) as test_hive_426 + ,cast(test_hive_1607 as string) as test_hive_1607 + ,cast(test_hive_1612 as string) as test_hive_1612 + ,cast(test_hive_1611 as string) as test_hive_1611 + ,cast(test_hive_1610 as string) as test_hive_1610 + ,cast(from_unixtime(unix_timestamp(test_hive_1613,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1613 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1617 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1617 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1619 +POSTHOOK: query: create view if not exists test_hive_1619 +as +select + cast(test_hive_1608 as int) as test_hive_1608 + ,cast(test_hive_1606 as int) as test_hive_1606 + ,cast(test_hive_1609 as int) as test_hive_1609 + ,cast(test_hive_426 as string) as test_hive_426 + ,cast(test_hive_1607 as string) as test_hive_1607 + ,cast(test_hive_1612 as string) as test_hive_1612 + ,cast(test_hive_1611 as string) as test_hive_1611 + ,cast(test_hive_1610 as string) as test_hive_1610 + ,cast(from_unixtime(unix_timestamp(test_hive_1613,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1613 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1617 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1617 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1619 +POSTHOOK: Lineage: test_hive_1619.creation_date EXPRESSION [(test_hive_1617)test_hive_1617.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1619.ds EXPRESSION [(test_hive_1617)test_hive_1617.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1619.ds_ts SIMPLE [(test_hive_1617)test_hive_1617.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1619.source_file_name SIMPLE [(test_hive_1617)test_hive_1617.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1619.test_hive_1606 EXPRESSION [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1606, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1619.test_hive_1607 SIMPLE [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1607, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1619.test_hive_1608 EXPRESSION [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1608, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1619.test_hive_1609 EXPRESSION [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1609, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1619.test_hive_1610 SIMPLE [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1610, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1619.test_hive_1611 SIMPLE [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1611, type:string, 
comment:null), ] +POSTHOOK: Lineage: test_hive_1619.test_hive_1612 SIMPLE [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1612, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1619.test_hive_1613 EXPRESSION [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1613, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1619.test_hive_426 SIMPLE [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_426, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1619.ts EXPRESSION [(test_hive_1617)test_hive_1617.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1618 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1618 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1618 +as +select + test_hive_1608 as test_hive_1608 + ,test_hive_1606 as test_hive_1606 + ,test_hive_1609 as test_hive_1609 + ,test_hive_426 as test_hive_426 + ,test_hive_1607 as test_hive_1607 + ,test_hive_1612 as test_hive_1612 + ,test_hive_1611 as test_hive_1611 + ,test_hive_1610 as test_hive_1610 + ,test_hive_1613 as test_hive_1613 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1619 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1617 +PREHOOK: Input: default@test_hive_1619 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1618 +POSTHOOK: query: create view test_hive_1618 +as +select + test_hive_1608 as test_hive_1608 + ,test_hive_1606 as test_hive_1606 + ,test_hive_1609 as test_hive_1609 + ,test_hive_426 as test_hive_426 + ,test_hive_1607 as test_hive_1607 + ,test_hive_1612 as test_hive_1612 + ,test_hive_1611 as test_hive_1611 + ,test_hive_1610 as test_hive_1610 + ,test_hive_1613 as test_hive_1613 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1619 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1617 +POSTHOOK: Input: default@test_hive_1619 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1618 +POSTHOOK: Lineage: test_hive_1618.creation_date EXPRESSION [(test_hive_1617)test_hive_1617.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1618.ds EXPRESSION [(test_hive_1617)test_hive_1617.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1618.ds_ts SIMPLE [(test_hive_1617)test_hive_1617.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1618.source_file_name SIMPLE [(test_hive_1617)test_hive_1617.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1618.test_hive_1606 EXPRESSION [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1606, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1618.test_hive_1607 SIMPLE [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1607, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1618.test_hive_1608 EXPRESSION [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1608, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1618.test_hive_1609 EXPRESSION [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1609, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1618.test_hive_1610 SIMPLE [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1610, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1618.test_hive_1611 SIMPLE [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1611, type:string, comment:null), ] 
+POSTHOOK: Lineage: test_hive_1618.test_hive_1612 SIMPLE [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1612, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1618.test_hive_1613 EXPRESSION [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1613, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1618.test_hive_426 SIMPLE [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_426, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1618.ts EXPRESSION [(test_hive_1617)test_hive_1617.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1615 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1615 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1615 +as +select t1.* +from test_hive_1618 t1 +inner join test_hive_1616 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1616 +PREHOOK: Input: default@test_hive_1617 +PREHOOK: Input: default@test_hive_1618 +PREHOOK: Input: default@test_hive_1619 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1615 +POSTHOOK: query: create view test_hive_1615 +as +select t1.* +from test_hive_1618 t1 +inner join test_hive_1616 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1616 +POSTHOOK: Input: default@test_hive_1617 +POSTHOOK: Input: default@test_hive_1618 +POSTHOOK: Input: default@test_hive_1619 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1615 +POSTHOOK: Lineage: test_hive_1615.creation_date EXPRESSION [(test_hive_1617)test_hive_1617.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1615.ds EXPRESSION [(test_hive_1617)test_hive_1617.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1615.ds_ts SIMPLE [(test_hive_1617)test_hive_1617.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1615.source_file_name SIMPLE [(test_hive_1617)test_hive_1617.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1615.test_hive_1606 EXPRESSION [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1606, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1615.test_hive_1607 SIMPLE [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1607, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1615.test_hive_1608 EXPRESSION [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1608, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1615.test_hive_1609 EXPRESSION [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1609, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1615.test_hive_1610 SIMPLE [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1610, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1615.test_hive_1611 SIMPLE [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1611, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1615.test_hive_1612 SIMPLE [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1612, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1615.test_hive_1613 EXPRESSION [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_1613, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1615.test_hive_426 SIMPLE [(test_hive_1617)test_hive_1617.FieldSchema(name:test_hive_426, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1615.ts EXPRESSION [(test_hive_1617)test_hive_1617.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1600 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1600 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1600 +( + test_hive_1596 string + ,test_hive_1594 string + ,test_hive_1597 string + ,test_hive_425 string + ,test_hive_1595 string + ,test_hive_1598 string + ,test_hive_1599 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1600 +POSTHOOK: query: create table test_hive_1600 +( + test_hive_1596 string + ,test_hive_1594 string + ,test_hive_1597 string + ,test_hive_425 string + ,test_hive_1595 string + ,test_hive_1598 string + ,test_hive_1599 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1600 +PREHOOK: query: create table if not exists test_hive_1603 +( + test_hive_1596 string + ,test_hive_1594 string + ,test_hive_1597 string + ,test_hive_425 string + ,test_hive_1595 string + ,test_hive_1598 string + ,test_hive_1599 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1603 +POSTHOOK: query: create table if not exists test_hive_1603 +( + test_hive_1596 string + ,test_hive_1594 string + ,test_hive_1597 string + ,test_hive_425 string + ,test_hive_1595 string + ,test_hive_1598 string + ,test_hive_1599 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1603 +PREHOOK: query: drop table if exists test_hive_1602 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1602 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1602 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1602 +POSTHOOK: query: create table if not exists test_hive_1602 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1602 +PREHOOK: query: drop view if exists test_hive_1605 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1605 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1605 +as +select + cast(test_hive_1596 as int) as test_hive_1596 + ,cast(test_hive_1594 as int) as test_hive_1594 + ,cast(test_hive_1597 as int) as test_hive_1597 + ,cast(test_hive_425 as string) as test_hive_425 + ,cast(test_hive_1595 as string) as test_hive_1595 + ,cast(test_hive_1598 as string) as test_hive_1598 + ,cast(from_unixtime(unix_timestamp(test_hive_1599,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1599 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1603 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: 
default@test_hive_1603 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1605 +POSTHOOK: query: create view if not exists test_hive_1605 +as +select + cast(test_hive_1596 as int) as test_hive_1596 + ,cast(test_hive_1594 as int) as test_hive_1594 + ,cast(test_hive_1597 as int) as test_hive_1597 + ,cast(test_hive_425 as string) as test_hive_425 + ,cast(test_hive_1595 as string) as test_hive_1595 + ,cast(test_hive_1598 as string) as test_hive_1598 + ,cast(from_unixtime(unix_timestamp(test_hive_1599,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1599 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1603 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1603 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1605 +POSTHOOK: Lineage: test_hive_1605.creation_date EXPRESSION [(test_hive_1603)test_hive_1603.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1605.ds EXPRESSION [(test_hive_1603)test_hive_1603.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1605.ds_ts SIMPLE [(test_hive_1603)test_hive_1603.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1605.source_file_name SIMPLE [(test_hive_1603)test_hive_1603.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1605.test_hive_1594 EXPRESSION [(test_hive_1603)test_hive_1603.FieldSchema(name:test_hive_1594, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1605.test_hive_1595 SIMPLE [(test_hive_1603)test_hive_1603.FieldSchema(name:test_hive_1595, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1605.test_hive_1596 EXPRESSION [(test_hive_1603)test_hive_1603.FieldSchema(name:test_hive_1596, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1605.test_hive_1597 EXPRESSION [(test_hive_1603)test_hive_1603.FieldSchema(name:test_hive_1597, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1605.test_hive_1598 SIMPLE [(test_hive_1603)test_hive_1603.FieldSchema(name:test_hive_1598, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1605.test_hive_1599 EXPRESSION [(test_hive_1603)test_hive_1603.FieldSchema(name:test_hive_1599, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1605.test_hive_425 SIMPLE [(test_hive_1603)test_hive_1603.FieldSchema(name:test_hive_425, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1605.ts EXPRESSION [(test_hive_1603)test_hive_1603.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1604 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1604 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1604 +as +select + test_hive_1596 as test_hive_1596 + ,test_hive_1594 as test_hive_1594 + ,test_hive_1597 as test_hive_1597 + ,test_hive_425 as test_hive_425 + ,test_hive_1595 as test_hive_1595 + ,test_hive_1598 as test_hive_1598 + ,test_hive_1599 as test_hive_1599 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1605 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1603 +PREHOOK: Input: default@test_hive_1605 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1604 +POSTHOOK: query: create view test_hive_1604 +as +select + test_hive_1596 as test_hive_1596 + ,test_hive_1594 as test_hive_1594 + ,test_hive_1597 
as test_hive_1597 + ,test_hive_425 as test_hive_425 + ,test_hive_1595 as test_hive_1595 + ,test_hive_1598 as test_hive_1598 + ,test_hive_1599 as test_hive_1599 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1605 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1603 +POSTHOOK: Input: default@test_hive_1605 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1604 +POSTHOOK: Lineage: test_hive_1604.creation_date EXPRESSION [(test_hive_1603)test_hive_1603.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1604.ds EXPRESSION [(test_hive_1603)test_hive_1603.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1604.ds_ts SIMPLE [(test_hive_1603)test_hive_1603.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1604.source_file_name SIMPLE [(test_hive_1603)test_hive_1603.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1604.test_hive_1594 EXPRESSION [(test_hive_1603)test_hive_1603.FieldSchema(name:test_hive_1594, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1604.test_hive_1595 SIMPLE [(test_hive_1603)test_hive_1603.FieldSchema(name:test_hive_1595, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1604.test_hive_1596 EXPRESSION [(test_hive_1603)test_hive_1603.FieldSchema(name:test_hive_1596, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1604.test_hive_1597 EXPRESSION [(test_hive_1603)test_hive_1603.FieldSchema(name:test_hive_1597, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1604.test_hive_1598 SIMPLE [(test_hive_1603)test_hive_1603.FieldSchema(name:test_hive_1598, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1604.test_hive_1599 EXPRESSION [(test_hive_1603)test_hive_1603.FieldSchema(name:test_hive_1599, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1604.test_hive_425 SIMPLE [(test_hive_1603)test_hive_1603.FieldSchema(name:test_hive_425, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1604.ts EXPRESSION [(test_hive_1603)test_hive_1603.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1601 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1601 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1601 +as +select t1.* +from test_hive_1604 t1 +inner join test_hive_1602 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1602 +PREHOOK: Input: default@test_hive_1603 +PREHOOK: Input: default@test_hive_1604 +PREHOOK: Input: default@test_hive_1605 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1601 +POSTHOOK: query: create view test_hive_1601 +as +select t1.* +from test_hive_1604 t1 +inner join test_hive_1602 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1602 +POSTHOOK: Input: default@test_hive_1603 +POSTHOOK: Input: default@test_hive_1604 +POSTHOOK: Input: default@test_hive_1605 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1601 +POSTHOOK: Lineage: test_hive_1601.creation_date EXPRESSION [(test_hive_1603)test_hive_1603.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1601.ds EXPRESSION [(test_hive_1603)test_hive_1603.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1601.ds_ts SIMPLE 
[(test_hive_1603)test_hive_1603.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1601.source_file_name SIMPLE [(test_hive_1603)test_hive_1603.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1601.test_hive_1594 EXPRESSION [(test_hive_1603)test_hive_1603.FieldSchema(name:test_hive_1594, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1601.test_hive_1595 SIMPLE [(test_hive_1603)test_hive_1603.FieldSchema(name:test_hive_1595, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1601.test_hive_1596 EXPRESSION [(test_hive_1603)test_hive_1603.FieldSchema(name:test_hive_1596, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1601.test_hive_1597 EXPRESSION [(test_hive_1603)test_hive_1603.FieldSchema(name:test_hive_1597, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1601.test_hive_1598 SIMPLE [(test_hive_1603)test_hive_1603.FieldSchema(name:test_hive_1598, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1601.test_hive_1599 EXPRESSION [(test_hive_1603)test_hive_1603.FieldSchema(name:test_hive_1599, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1601.test_hive_425 SIMPLE [(test_hive_1603)test_hive_1603.FieldSchema(name:test_hive_425, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1601.ts EXPRESSION [(test_hive_1603)test_hive_1603.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1588 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1588 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1588 +( + test_hive_1584 string + ,test_hive_1582 string + ,test_hive_1585 string + ,test_hive_424 string + ,test_hive_1583 string + ,test_hive_1586 string + ,test_hive_1587 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1588 +POSTHOOK: query: create table test_hive_1588 +( + test_hive_1584 string + ,test_hive_1582 string + ,test_hive_1585 string + ,test_hive_424 string + ,test_hive_1583 string + ,test_hive_1586 string + ,test_hive_1587 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1588 +PREHOOK: query: create table if not exists test_hive_1591 +( + test_hive_1584 string + ,test_hive_1582 string + ,test_hive_1585 string + ,test_hive_424 string + ,test_hive_1583 string + ,test_hive_1586 string + ,test_hive_1587 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1591 +POSTHOOK: query: create table if not exists test_hive_1591 +( + test_hive_1584 string + ,test_hive_1582 string + ,test_hive_1585 string + ,test_hive_424 string + ,test_hive_1583 string + ,test_hive_1586 string + ,test_hive_1587 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1591 +PREHOOK: query: drop table if exists test_hive_1590 purge +PREHOOK: type: 
DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1590 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1590 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1590 +POSTHOOK: query: create table if not exists test_hive_1590 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1590 +PREHOOK: query: drop view if exists test_hive_1593 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1593 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1593 +as +select + cast(test_hive_1584 as int) as test_hive_1584 + ,cast(test_hive_1582 as int) as test_hive_1582 + ,cast(test_hive_1585 as int) as test_hive_1585 + ,cast(test_hive_424 as string) as test_hive_424 + ,cast(test_hive_1583 as string) as test_hive_1583 + ,cast(test_hive_1586 as string) as test_hive_1586 + ,cast(from_unixtime(unix_timestamp(test_hive_1587,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1587 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1591 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1591 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1593 +POSTHOOK: query: create view if not exists test_hive_1593 +as +select + cast(test_hive_1584 as int) as test_hive_1584 + ,cast(test_hive_1582 as int) as test_hive_1582 + ,cast(test_hive_1585 as int) as test_hive_1585 + ,cast(test_hive_424 as string) as test_hive_424 + ,cast(test_hive_1583 as string) as test_hive_1583 + ,cast(test_hive_1586 as string) as test_hive_1586 + ,cast(from_unixtime(unix_timestamp(test_hive_1587,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1587 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1591 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1591 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1593 +POSTHOOK: Lineage: test_hive_1593.creation_date EXPRESSION [(test_hive_1591)test_hive_1591.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1593.ds EXPRESSION [(test_hive_1591)test_hive_1591.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1593.ds_ts SIMPLE [(test_hive_1591)test_hive_1591.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1593.source_file_name SIMPLE [(test_hive_1591)test_hive_1591.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1593.test_hive_1582 EXPRESSION [(test_hive_1591)test_hive_1591.FieldSchema(name:test_hive_1582, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1593.test_hive_1583 SIMPLE [(test_hive_1591)test_hive_1591.FieldSchema(name:test_hive_1583, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1593.test_hive_1584 EXPRESSION [(test_hive_1591)test_hive_1591.FieldSchema(name:test_hive_1584, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1593.test_hive_1585 EXPRESSION [(test_hive_1591)test_hive_1591.FieldSchema(name:test_hive_1585, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1593.test_hive_1586 SIMPLE [(test_hive_1591)test_hive_1591.FieldSchema(name:test_hive_1586, 
type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1593.test_hive_1587 EXPRESSION [(test_hive_1591)test_hive_1591.FieldSchema(name:test_hive_1587, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1593.test_hive_424 SIMPLE [(test_hive_1591)test_hive_1591.FieldSchema(name:test_hive_424, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1593.ts EXPRESSION [(test_hive_1591)test_hive_1591.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1592
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1592
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1592
+as
+select
+ test_hive_1584 as test_hive_1584
+ ,test_hive_1582 as test_hive_1582
+ ,test_hive_1585 as test_hive_1585
+ ,test_hive_424 as test_hive_424
+ ,test_hive_1583 as test_hive_1583
+ ,test_hive_1586 as test_hive_1586
+ ,test_hive_1587 as test_hive_1587
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1593 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1591
+PREHOOK: Input: default@test_hive_1593
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1592
+POSTHOOK: query: create view test_hive_1592
+as
+select
+ test_hive_1584 as test_hive_1584
+ ,test_hive_1582 as test_hive_1582
+ ,test_hive_1585 as test_hive_1585
+ ,test_hive_424 as test_hive_424
+ ,test_hive_1583 as test_hive_1583
+ ,test_hive_1586 as test_hive_1586
+ ,test_hive_1587 as test_hive_1587
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1593 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1591
+POSTHOOK: Input: default@test_hive_1593
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1592
+POSTHOOK: Lineage: test_hive_1592.creation_date EXPRESSION [(test_hive_1591)test_hive_1591.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1592.ds EXPRESSION [(test_hive_1591)test_hive_1591.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1592.ds_ts SIMPLE [(test_hive_1591)test_hive_1591.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1592.source_file_name SIMPLE [(test_hive_1591)test_hive_1591.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1592.test_hive_1582 EXPRESSION [(test_hive_1591)test_hive_1591.FieldSchema(name:test_hive_1582, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1592.test_hive_1583 SIMPLE [(test_hive_1591)test_hive_1591.FieldSchema(name:test_hive_1583, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1592.test_hive_1584 EXPRESSION [(test_hive_1591)test_hive_1591.FieldSchema(name:test_hive_1584, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1592.test_hive_1585 EXPRESSION [(test_hive_1591)test_hive_1591.FieldSchema(name:test_hive_1585, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1592.test_hive_1586 SIMPLE [(test_hive_1591)test_hive_1591.FieldSchema(name:test_hive_1586, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1592.test_hive_1587 EXPRESSION [(test_hive_1591)test_hive_1591.FieldSchema(name:test_hive_1587, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1592.test_hive_424 SIMPLE [(test_hive_1591)test_hive_1591.FieldSchema(name:test_hive_424, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1592.ts EXPRESSION [(test_hive_1591)test_hive_1591.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1589
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1589
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1589
+as
+select t1.*
+from test_hive_1592 t1
+inner join test_hive_1590 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1590
+PREHOOK: Input: default@test_hive_1591
+PREHOOK: Input: default@test_hive_1592
+PREHOOK: Input: default@test_hive_1593
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1589
+POSTHOOK: query: create view test_hive_1589
+as
+select t1.*
+from test_hive_1592 t1
+inner join test_hive_1590 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1590
+POSTHOOK: Input: default@test_hive_1591
+POSTHOOK: Input: default@test_hive_1592
+POSTHOOK: Input: default@test_hive_1593
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1589
+POSTHOOK: Lineage: test_hive_1589.creation_date EXPRESSION [(test_hive_1591)test_hive_1591.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1589.ds EXPRESSION [(test_hive_1591)test_hive_1591.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1589.ds_ts SIMPLE [(test_hive_1591)test_hive_1591.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1589.source_file_name SIMPLE [(test_hive_1591)test_hive_1591.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1589.test_hive_1582 EXPRESSION [(test_hive_1591)test_hive_1591.FieldSchema(name:test_hive_1582, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1589.test_hive_1583 SIMPLE [(test_hive_1591)test_hive_1591.FieldSchema(name:test_hive_1583, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1589.test_hive_1584 EXPRESSION [(test_hive_1591)test_hive_1591.FieldSchema(name:test_hive_1584, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1589.test_hive_1585 EXPRESSION [(test_hive_1591)test_hive_1591.FieldSchema(name:test_hive_1585, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1589.test_hive_1586 SIMPLE [(test_hive_1591)test_hive_1591.FieldSchema(name:test_hive_1586, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1589.test_hive_1587 EXPRESSION [(test_hive_1591)test_hive_1591.FieldSchema(name:test_hive_1587, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1589.test_hive_424 SIMPLE [(test_hive_1591)test_hive_1591.FieldSchema(name:test_hive_424, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1589.ts EXPRESSION [(test_hive_1591)test_hive_1591.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1576 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1576 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1576
+(
+ test_hive_1570 string
+ ,test_hive_1568 string
+ ,test_hive_1571 string
+ ,test_hive_423 string
+ ,test_hive_1569 string
+ ,test_hive_1574 string
+ ,test_hive_1573 string
+ ,test_hive_1572 string
+ ,test_hive_1575 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1576
+POSTHOOK: query: create table test_hive_1576
+(
+ test_hive_1570 string
+ ,test_hive_1568 string
+ ,test_hive_1571 string
+ ,test_hive_423 string
+ ,test_hive_1569 string
+ ,test_hive_1574 string
+ ,test_hive_1573 string
+ ,test_hive_1572 string
+ ,test_hive_1575 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1576
+PREHOOK: query: create table if not exists test_hive_1579
+(
+ test_hive_1570 string
+ ,test_hive_1568 string
+ ,test_hive_1571 string
+ ,test_hive_423 string
+ ,test_hive_1569 string
+ ,test_hive_1574 string
+ ,test_hive_1573 string
+ ,test_hive_1572 string
+ ,test_hive_1575 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1579
+POSTHOOK: query: create table if not exists test_hive_1579
+(
+ test_hive_1570 string
+ ,test_hive_1568 string
+ ,test_hive_1571 string
+ ,test_hive_423 string
+ ,test_hive_1569 string
+ ,test_hive_1574 string
+ ,test_hive_1573 string
+ ,test_hive_1572 string
+ ,test_hive_1575 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1579
+PREHOOK: query: drop table if exists test_hive_1578 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1578 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1578
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1578
+POSTHOOK: query: create table if not exists test_hive_1578
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1578
+PREHOOK: query: drop view if exists test_hive_1581
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1581
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1581
+as
+select
+ cast(test_hive_1570 as int) as test_hive_1570
+ ,cast(test_hive_1568 as int) as test_hive_1568
+ ,cast(test_hive_1571 as int) as test_hive_1571
+ ,cast(test_hive_423 as string) as test_hive_423
+ ,cast(test_hive_1569 as string) as test_hive_1569
+ ,cast(test_hive_1574 as string) as test_hive_1574
+ ,cast(test_hive_1573 as string) as test_hive_1573
+ ,cast(test_hive_1572 as string) as test_hive_1572
+ ,cast(from_unixtime(unix_timestamp(test_hive_1575,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1575
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1579
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1579
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1581
+POSTHOOK: query: create view if not exists test_hive_1581
+as
+select
+ cast(test_hive_1570 as int) as test_hive_1570
+ ,cast(test_hive_1568 as int) as test_hive_1568
+ ,cast(test_hive_1571 as int) as test_hive_1571
+ ,cast(test_hive_423 as string) as test_hive_423
+ ,cast(test_hive_1569 as string) as test_hive_1569
+ ,cast(test_hive_1574 as string) as test_hive_1574
+ ,cast(test_hive_1573 as string) as test_hive_1573
+ ,cast(test_hive_1572 as string) as test_hive_1572
+ ,cast(from_unixtime(unix_timestamp(test_hive_1575,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1575
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1579
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1579
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1581
+POSTHOOK: Lineage: test_hive_1581.creation_date EXPRESSION [(test_hive_1579)test_hive_1579.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1581.ds EXPRESSION [(test_hive_1579)test_hive_1579.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1581.ds_ts SIMPLE [(test_hive_1579)test_hive_1579.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1581.source_file_name SIMPLE [(test_hive_1579)test_hive_1579.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1581.test_hive_1568 EXPRESSION [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1568, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1581.test_hive_1569 SIMPLE [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1569, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1581.test_hive_1570 EXPRESSION [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1570, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1581.test_hive_1571 EXPRESSION [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1571, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1581.test_hive_1572 SIMPLE [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1572, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1581.test_hive_1573 SIMPLE [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1573, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1581.test_hive_1574 SIMPLE [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1574, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1581.test_hive_1575 EXPRESSION [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1575, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1581.test_hive_423 SIMPLE [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_423, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1581.ts EXPRESSION [(test_hive_1579)test_hive_1579.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1580
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1580
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1580
+as
+select
+ test_hive_1570 as test_hive_1570
+ ,test_hive_1568 as test_hive_1568
+ ,test_hive_1571 as test_hive_1571
+ ,test_hive_423 as test_hive_423
+ ,test_hive_1569 as test_hive_1569
+ ,test_hive_1574 as test_hive_1574
+ ,test_hive_1573 as test_hive_1573
+ ,test_hive_1572 as test_hive_1572
+ ,test_hive_1575 as test_hive_1575
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1581 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1579
+PREHOOK: Input: default@test_hive_1581
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1580
+POSTHOOK: query: create view test_hive_1580
+as
+select
+ test_hive_1570 as test_hive_1570
+ ,test_hive_1568 as test_hive_1568
+ ,test_hive_1571 as test_hive_1571
+ ,test_hive_423 as test_hive_423
+ ,test_hive_1569 as test_hive_1569
+ ,test_hive_1574 as test_hive_1574
+ ,test_hive_1573 as test_hive_1573
+ ,test_hive_1572 as test_hive_1572
+ ,test_hive_1575 as test_hive_1575
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1581 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1579
+POSTHOOK: Input: default@test_hive_1581
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1580
+POSTHOOK: Lineage: test_hive_1580.creation_date EXPRESSION [(test_hive_1579)test_hive_1579.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1580.ds EXPRESSION [(test_hive_1579)test_hive_1579.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1580.ds_ts SIMPLE [(test_hive_1579)test_hive_1579.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1580.source_file_name SIMPLE [(test_hive_1579)test_hive_1579.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1580.test_hive_1568 EXPRESSION [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1568, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1580.test_hive_1569 SIMPLE [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1569, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1580.test_hive_1570 EXPRESSION [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1570, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1580.test_hive_1571 EXPRESSION [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1571, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1580.test_hive_1572 SIMPLE [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1572, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1580.test_hive_1573 SIMPLE [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1573, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1580.test_hive_1574 SIMPLE [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1574, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1580.test_hive_1575 EXPRESSION [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1575, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1580.test_hive_423 SIMPLE [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_423, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1580.ts EXPRESSION [(test_hive_1579)test_hive_1579.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1577
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1577
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1577
+as
+select t1.*
+from test_hive_1580 t1
+inner join test_hive_1578 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1578
+PREHOOK: Input: default@test_hive_1579
+PREHOOK: Input: default@test_hive_1580
+PREHOOK: Input: default@test_hive_1581
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1577
+POSTHOOK: query: create view test_hive_1577
+as
+select t1.*
+from test_hive_1580 t1
+inner join test_hive_1578 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1578
+POSTHOOK: Input: default@test_hive_1579
+POSTHOOK: Input: default@test_hive_1580
+POSTHOOK: Input: default@test_hive_1581
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1577
+POSTHOOK: Lineage: test_hive_1577.creation_date EXPRESSION [(test_hive_1579)test_hive_1579.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1577.ds EXPRESSION [(test_hive_1579)test_hive_1579.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1577.ds_ts SIMPLE [(test_hive_1579)test_hive_1579.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1577.source_file_name SIMPLE [(test_hive_1579)test_hive_1579.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1577.test_hive_1568 EXPRESSION [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1568, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1577.test_hive_1569 SIMPLE [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1569, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1577.test_hive_1570 EXPRESSION [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1570, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1577.test_hive_1571 EXPRESSION [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1571, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1577.test_hive_1572 SIMPLE [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1572, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1577.test_hive_1573 SIMPLE [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1573, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1577.test_hive_1574 SIMPLE [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1574, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1577.test_hive_1575 EXPRESSION [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_1575, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1577.test_hive_423 SIMPLE [(test_hive_1579)test_hive_1579.FieldSchema(name:test_hive_423, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1577.ts EXPRESSION [(test_hive_1579)test_hive_1579.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1562 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1562 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1562
+(
+ test_hive_1558 string
+ ,test_hive_1556 string
+ ,test_hive_1559 string
+ ,test_hive_422 string
+ ,test_hive_1557 string
+ ,test_hive_1560 string
+ ,test_hive_1561 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1562
+POSTHOOK: query: create table test_hive_1562
+(
+ test_hive_1558 string
+ ,test_hive_1556 string
+ ,test_hive_1559 string
+ ,test_hive_422 string
+ ,test_hive_1557 string
+ ,test_hive_1560 string
+ ,test_hive_1561 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1562
+PREHOOK: query: create table if not exists test_hive_1565
+(
+ test_hive_1558 string
+ ,test_hive_1556 string
+ ,test_hive_1559 string
+ ,test_hive_422 string
+ ,test_hive_1557 string
+ ,test_hive_1560 string
+ ,test_hive_1561 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1565
+POSTHOOK: query: create table if not exists test_hive_1565
+(
+ test_hive_1558 string
+ ,test_hive_1556 string
+ ,test_hive_1559 string
+ ,test_hive_422 string
+ ,test_hive_1557 string
+ ,test_hive_1560 string
+ ,test_hive_1561 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1565
+PREHOOK: query: drop table if exists test_hive_1564 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1564 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1564
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1564
+POSTHOOK: query: create table if not exists test_hive_1564
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1564
+PREHOOK: query: drop view if exists test_hive_1567
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1567
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1567
+as
+select
+ cast(test_hive_1558 as int) as test_hive_1558
+ ,cast(test_hive_1556 as int) as test_hive_1556
+ ,cast(test_hive_1559 as int) as test_hive_1559
+ ,cast(test_hive_422 as string) as test_hive_422
+ ,cast(test_hive_1557 as string) as test_hive_1557
+ ,cast(test_hive_1560 as string) as test_hive_1560
+ ,cast(from_unixtime(unix_timestamp(test_hive_1561,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1561
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1565
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1565
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1567
+POSTHOOK: query: create view if not exists test_hive_1567
+as
+select
+ cast(test_hive_1558 as int) as test_hive_1558
+ ,cast(test_hive_1556 as int) as test_hive_1556
+ ,cast(test_hive_1559 as int) as test_hive_1559
+ ,cast(test_hive_422 as string) as test_hive_422
+ ,cast(test_hive_1557 as string) as test_hive_1557
+ ,cast(test_hive_1560 as string) as test_hive_1560
+ ,cast(from_unixtime(unix_timestamp(test_hive_1561,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1561
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1565
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1565
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1567
+POSTHOOK: Lineage: test_hive_1567.creation_date EXPRESSION [(test_hive_1565)test_hive_1565.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1567.ds EXPRESSION [(test_hive_1565)test_hive_1565.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1567.ds_ts SIMPLE [(test_hive_1565)test_hive_1565.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1567.source_file_name SIMPLE [(test_hive_1565)test_hive_1565.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1567.test_hive_1556 EXPRESSION [(test_hive_1565)test_hive_1565.FieldSchema(name:test_hive_1556, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1567.test_hive_1557 SIMPLE [(test_hive_1565)test_hive_1565.FieldSchema(name:test_hive_1557, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1567.test_hive_1558 EXPRESSION [(test_hive_1565)test_hive_1565.FieldSchema(name:test_hive_1558, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1567.test_hive_1559 EXPRESSION [(test_hive_1565)test_hive_1565.FieldSchema(name:test_hive_1559, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1567.test_hive_1560 SIMPLE [(test_hive_1565)test_hive_1565.FieldSchema(name:test_hive_1560, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1567.test_hive_1561 EXPRESSION [(test_hive_1565)test_hive_1565.FieldSchema(name:test_hive_1561, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1567.test_hive_422 SIMPLE [(test_hive_1565)test_hive_1565.FieldSchema(name:test_hive_422, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1567.ts EXPRESSION [(test_hive_1565)test_hive_1565.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1566
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1566
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1566
+as
+select
+ test_hive_1558 as test_hive_1558
+ ,test_hive_1556 as test_hive_1556
+ ,test_hive_1559 as test_hive_1559
+ ,test_hive_422 as test_hive_422
+ ,test_hive_1557 as test_hive_1557
+ ,test_hive_1560 as test_hive_1560
+ ,test_hive_1561 as test_hive_1561
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1567 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1565
+PREHOOK: Input: default@test_hive_1567
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1566
+POSTHOOK: query: create view test_hive_1566
+as
+select
+ test_hive_1558 as test_hive_1558
+ ,test_hive_1556 as test_hive_1556
+ ,test_hive_1559 as test_hive_1559
+ ,test_hive_422 as test_hive_422
+ ,test_hive_1557 as test_hive_1557
+ ,test_hive_1560 as test_hive_1560
+ ,test_hive_1561 as test_hive_1561
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1567 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1565
+POSTHOOK: Input: default@test_hive_1567
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1566
+POSTHOOK: Lineage: test_hive_1566.creation_date EXPRESSION [(test_hive_1565)test_hive_1565.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1566.ds EXPRESSION [(test_hive_1565)test_hive_1565.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1566.ds_ts SIMPLE [(test_hive_1565)test_hive_1565.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1566.source_file_name SIMPLE [(test_hive_1565)test_hive_1565.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1566.test_hive_1556 EXPRESSION [(test_hive_1565)test_hive_1565.FieldSchema(name:test_hive_1556, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1566.test_hive_1557 SIMPLE [(test_hive_1565)test_hive_1565.FieldSchema(name:test_hive_1557, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1566.test_hive_1558 EXPRESSION [(test_hive_1565)test_hive_1565.FieldSchema(name:test_hive_1558, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1566.test_hive_1559 EXPRESSION [(test_hive_1565)test_hive_1565.FieldSchema(name:test_hive_1559, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1566.test_hive_1560 SIMPLE [(test_hive_1565)test_hive_1565.FieldSchema(name:test_hive_1560, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1566.test_hive_1561 EXPRESSION [(test_hive_1565)test_hive_1565.FieldSchema(name:test_hive_1561, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1566.test_hive_422 SIMPLE [(test_hive_1565)test_hive_1565.FieldSchema(name:test_hive_422, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1566.ts EXPRESSION [(test_hive_1565)test_hive_1565.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1563
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1563
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1563
+as
+select t1.*
+from test_hive_1566 t1
+inner join test_hive_1564 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1564
+PREHOOK: Input: default@test_hive_1565
+PREHOOK: Input: default@test_hive_1566
+PREHOOK: Input: default@test_hive_1567
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1563
+POSTHOOK: query: create view test_hive_1563
+as
+select t1.*
+from test_hive_1566 t1
+inner join test_hive_1564 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1564
+POSTHOOK: Input: default@test_hive_1565
+POSTHOOK: Input: default@test_hive_1566
+POSTHOOK: Input: default@test_hive_1567
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1563
+POSTHOOK: Lineage: test_hive_1563.creation_date EXPRESSION [(test_hive_1565)test_hive_1565.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1563.ds EXPRESSION [(test_hive_1565)test_hive_1565.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1563.ds_ts SIMPLE [(test_hive_1565)test_hive_1565.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1563.source_file_name SIMPLE [(test_hive_1565)test_hive_1565.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1563.test_hive_1556 EXPRESSION [(test_hive_1565)test_hive_1565.FieldSchema(name:test_hive_1556, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1563.test_hive_1557 SIMPLE [(test_hive_1565)test_hive_1565.FieldSchema(name:test_hive_1557, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1563.test_hive_1558 EXPRESSION [(test_hive_1565)test_hive_1565.FieldSchema(name:test_hive_1558, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1563.test_hive_1559 EXPRESSION [(test_hive_1565)test_hive_1565.FieldSchema(name:test_hive_1559, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1563.test_hive_1560 SIMPLE [(test_hive_1565)test_hive_1565.FieldSchema(name:test_hive_1560, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1563.test_hive_1561 EXPRESSION [(test_hive_1565)test_hive_1565.FieldSchema(name:test_hive_1561, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1563.test_hive_422 SIMPLE [(test_hive_1565)test_hive_1565.FieldSchema(name:test_hive_422, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1563.ts EXPRESSION [(test_hive_1565)test_hive_1565.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1550 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1550 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1550
+(
+ test_hive_1545 string
+ ,test_hive_1543 string
+ ,test_hive_1546 string
+ ,test_hive_421 string
+ ,test_hive_1544 string
+ ,test_hive_1548 string
+ ,test_hive_1547 string
+ ,test_hive_1549 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1550
+POSTHOOK: query: create table test_hive_1550
+(
+ test_hive_1545 string
+ ,test_hive_1543 string
+ ,test_hive_1546 string
+ ,test_hive_421 string
+ ,test_hive_1544 string
+ ,test_hive_1548 string
+ ,test_hive_1547 string
+ ,test_hive_1549 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1550
+PREHOOK: query: create table if not exists test_hive_1553
+(
+ test_hive_1545 string
+ ,test_hive_1543 string
+ ,test_hive_1546 string
+ ,test_hive_421 string
+ ,test_hive_1544 string
+ ,test_hive_1548 string
+ ,test_hive_1547 string
+ ,test_hive_1549 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1553
+POSTHOOK: query: create table if not exists test_hive_1553
+(
+ test_hive_1545 string
+ ,test_hive_1543 string
+ ,test_hive_1546 string
+ ,test_hive_421 string
+ ,test_hive_1544 string
+ ,test_hive_1548 string
+ ,test_hive_1547 string
+ ,test_hive_1549 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1553
+PREHOOK: query: drop table if exists test_hive_1552 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1552 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1552
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1552
+POSTHOOK: query: create table if not exists test_hive_1552
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1552
+PREHOOK: query: drop view if exists test_hive_1555
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1555
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1555
+as
+select
+ cast(test_hive_1545 as int) as test_hive_1545
+ ,cast(test_hive_1543 as int) as test_hive_1543
+ ,cast(test_hive_1546 as int) as test_hive_1546
+ ,cast(test_hive_421 as string) as test_hive_421
+ ,cast(test_hive_1544 as string) as test_hive_1544
+ ,cast(test_hive_1548 as string) as test_hive_1548
+ ,cast(test_hive_1547 as string) as test_hive_1547
+ ,cast(from_unixtime(unix_timestamp(test_hive_1549,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1549
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1553
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1553
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1555
+POSTHOOK: query: create view if not exists test_hive_1555
+as
+select
+ cast(test_hive_1545 as int) as test_hive_1545
+ ,cast(test_hive_1543 as int) as test_hive_1543
+ ,cast(test_hive_1546 as int) as test_hive_1546
+ ,cast(test_hive_421 as string) as test_hive_421
+ ,cast(test_hive_1544 as string) as test_hive_1544
+ ,cast(test_hive_1548 as string) as test_hive_1548
+ ,cast(test_hive_1547 as string) as test_hive_1547
+ ,cast(from_unixtime(unix_timestamp(test_hive_1549,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1549
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1553
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1553
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1555
+POSTHOOK: Lineage: test_hive_1555.creation_date EXPRESSION [(test_hive_1553)test_hive_1553.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1555.ds EXPRESSION [(test_hive_1553)test_hive_1553.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1555.ds_ts SIMPLE [(test_hive_1553)test_hive_1553.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1555.source_file_name SIMPLE [(test_hive_1553)test_hive_1553.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1555.test_hive_1543 EXPRESSION [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_1543, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1555.test_hive_1544 SIMPLE [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_1544, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1555.test_hive_1545 EXPRESSION [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_1545, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1555.test_hive_1546 EXPRESSION [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_1546, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1555.test_hive_1547 SIMPLE [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_1547, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1555.test_hive_1548 SIMPLE [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_1548, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1555.test_hive_1549 EXPRESSION [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_1549, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1555.test_hive_421 SIMPLE [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_421, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1555.ts EXPRESSION [(test_hive_1553)test_hive_1553.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1554
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1554
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1554
+as
+select
+ test_hive_1545 as test_hive_1545
+ ,test_hive_1543 as test_hive_1543
+ ,test_hive_1546 as test_hive_1546
+ ,test_hive_421 as test_hive_421
+ ,test_hive_1544 as test_hive_1544
+ ,test_hive_1548 as test_hive_1548
+ ,test_hive_1547 as test_hive_1547
+ ,test_hive_1549 as test_hive_1549
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1555 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1553
+PREHOOK: Input: default@test_hive_1555
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1554
+POSTHOOK: query: create view test_hive_1554
+as
+select
+ test_hive_1545 as test_hive_1545
+ ,test_hive_1543 as test_hive_1543
+ ,test_hive_1546 as test_hive_1546
+ ,test_hive_421 as test_hive_421
+ ,test_hive_1544 as test_hive_1544
+ ,test_hive_1548 as test_hive_1548
+ ,test_hive_1547 as test_hive_1547
+ ,test_hive_1549 as test_hive_1549
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1555 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1553
+POSTHOOK: Input: default@test_hive_1555
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1554
+POSTHOOK: Lineage: test_hive_1554.creation_date EXPRESSION [(test_hive_1553)test_hive_1553.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1554.ds EXPRESSION [(test_hive_1553)test_hive_1553.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1554.ds_ts SIMPLE [(test_hive_1553)test_hive_1553.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1554.source_file_name SIMPLE [(test_hive_1553)test_hive_1553.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1554.test_hive_1543 EXPRESSION [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_1543, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1554.test_hive_1544 SIMPLE [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_1544, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1554.test_hive_1545 EXPRESSION [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_1545, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1554.test_hive_1546 EXPRESSION [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_1546, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1554.test_hive_1547 SIMPLE [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_1547, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1554.test_hive_1548 SIMPLE [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_1548, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1554.test_hive_1549 EXPRESSION [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_1549, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1554.test_hive_421 SIMPLE [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_421, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1554.ts EXPRESSION [(test_hive_1553)test_hive_1553.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1551
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1551
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1551
+as
+select t1.*
+from test_hive_1554 t1
+inner join test_hive_1552 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1552
+PREHOOK: Input: default@test_hive_1553
+PREHOOK: Input: default@test_hive_1554
+PREHOOK: Input: default@test_hive_1555
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1551
+POSTHOOK: query: create view test_hive_1551
+as
+select t1.*
+from test_hive_1554 t1
+inner join test_hive_1552 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1552
+POSTHOOK: Input: default@test_hive_1553
+POSTHOOK: Input: default@test_hive_1554
+POSTHOOK: Input: default@test_hive_1555
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1551
+POSTHOOK: Lineage: test_hive_1551.creation_date EXPRESSION [(test_hive_1553)test_hive_1553.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1551.ds EXPRESSION [(test_hive_1553)test_hive_1553.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1551.ds_ts SIMPLE [(test_hive_1553)test_hive_1553.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1551.source_file_name SIMPLE [(test_hive_1553)test_hive_1553.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1551.test_hive_1543 EXPRESSION [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_1543, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1551.test_hive_1544 SIMPLE [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_1544, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1551.test_hive_1545 EXPRESSION [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_1545, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1551.test_hive_1546 EXPRESSION [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_1546, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1551.test_hive_1547 SIMPLE [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_1547, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1551.test_hive_1548 SIMPLE [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_1548, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1551.test_hive_1549 EXPRESSION [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_1549, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1551.test_hive_421 SIMPLE [(test_hive_1553)test_hive_1553.FieldSchema(name:test_hive_421, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1551.ts EXPRESSION [(test_hive_1553)test_hive_1553.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1328 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1328 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1328
+(
+ test_hive_1322 string
+ ,test_hive_1318 string
+ ,test_hive_1323 string
+ ,test_hive_335 string
+ ,test_hive_1321 string
+ ,test_hive_1320 string
+ ,test_hive_1319 string
+ ,test_hive_1326 string
+ ,test_hive_1325 string
+ ,test_hive_1324 string
+ ,test_hive_1327 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1328
+POSTHOOK: query: create table test_hive_1328
+(
+ test_hive_1322 string
+ ,test_hive_1318 string
+ ,test_hive_1323 string
+ ,test_hive_335 string
+ ,test_hive_1321 string
+ ,test_hive_1320 string
+ ,test_hive_1319 string
+ ,test_hive_1326 string
+ ,test_hive_1325 string
+ ,test_hive_1324 string
+ ,test_hive_1327 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1328
+PREHOOK: query: create table if not exists test_hive_1331
+(
+ test_hive_1322 string
+ ,test_hive_1318 string
+ ,test_hive_1323 string
+ ,test_hive_335 string
+ ,test_hive_1321 string
+ ,test_hive_1320 string
+ ,test_hive_1319 string
+ ,test_hive_1326 string
+ ,test_hive_1325 string
+ ,test_hive_1324 string
+ ,test_hive_1327 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1331
+POSTHOOK: query: create table if not exists test_hive_1331
+(
+ test_hive_1322 string
+ ,test_hive_1318 string
+ ,test_hive_1323 string
+ ,test_hive_335 string
+ ,test_hive_1321 string
+ ,test_hive_1320 string
+ ,test_hive_1319 string
+ ,test_hive_1326 string
+ ,test_hive_1325 string
+ ,test_hive_1324 string
+ ,test_hive_1327 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1331
+PREHOOK: query: drop table if exists test_hive_1330 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1330 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1330
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1330
+POSTHOOK: query: create table if not exists test_hive_1330
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1330
+PREHOOK: query: drop view if exists test_hive_1333
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1333
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1333
+as
+select
+ cast(test_hive_1322 as int) as test_hive_1322
+ ,cast(test_hive_1318 as int) as test_hive_1318
+ ,cast(test_hive_1323 as int) as test_hive_1323
+ ,cast(test_hive_335 as string) as test_hive_335
+ ,cast(test_hive_1321 as string) as test_hive_1321
+ ,cast(from_unixtime(unix_timestamp(test_hive_1320 ,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1320
+ ,cast(from_unixtime(unix_timestamp(test_hive_1319 ,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1319
+ ,cast(test_hive_1326 as string) as test_hive_1326
+ ,cast(test_hive_1325 as string) as test_hive_1325
+ ,cast(test_hive_1324 as string) as test_hive_1324
+ ,cast(from_unixtime(unix_timestamp(test_hive_1327,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1327
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1331
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1331
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1333
+POSTHOOK: query: create view if not exists test_hive_1333
+as
+select
+ cast(test_hive_1322 as int) as test_hive_1322
+ ,cast(test_hive_1318 as int) as test_hive_1318
+ ,cast(test_hive_1323 as int) as test_hive_1323
+ ,cast(test_hive_335 as string) as test_hive_335
+ ,cast(test_hive_1321 as string) as test_hive_1321
+ ,cast(from_unixtime(unix_timestamp(test_hive_1320 ,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1320
+ ,cast(from_unixtime(unix_timestamp(test_hive_1319 ,'yyyymmdd'), 'yyyy-mm-dd') as timestamp) as test_hive_1319
+ ,cast(test_hive_1326 as string) as test_hive_1326
+ ,cast(test_hive_1325 as string) as test_hive_1325
+ ,cast(test_hive_1324 as string) as test_hive_1324
+ ,cast(from_unixtime(unix_timestamp(test_hive_1327,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1327
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1331
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1331
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1333
+POSTHOOK: Lineage: test_hive_1333.creation_date EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1333.ds EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1333.ds_ts SIMPLE [(test_hive_1331)test_hive_1331.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1333.source_file_name SIMPLE [(test_hive_1331)test_hive_1331.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1333.test_hive_1318 EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1318, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1333.test_hive_1319 EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1319, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1333.test_hive_1320 EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1320, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1333.test_hive_1321 SIMPLE [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1321, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1333.test_hive_1322 EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1322, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1333.test_hive_1323 EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1323, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1333.test_hive_1324 SIMPLE [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1324, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1333.test_hive_1325 SIMPLE [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1325, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1333.test_hive_1326 SIMPLE [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1326, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1333.test_hive_1327 EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1327, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1333.test_hive_335 SIMPLE [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_335, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1333.ts EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1332
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1332
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1332
+as
+select
+ test_hive_1322 as test_hive_1322
+ ,test_hive_1318 as test_hive_1318
+ ,test_hive_1323 as test_hive_1323
+ ,test_hive_335 as test_hive_335
+ ,test_hive_1321 as test_hive_1321
+ ,test_hive_1320 as test_hive_1320
+ ,test_hive_1319 as test_hive_1319
+ ,test_hive_1326 as test_hive_1326
+ ,test_hive_1325 as test_hive_1325
+ ,test_hive_1324 as test_hive_1324
+ ,test_hive_1327 as test_hive_1327
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1333 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1331
+PREHOOK: Input: default@test_hive_1333
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1332
+POSTHOOK: query: create view test_hive_1332
+as
+select
+ test_hive_1322 as test_hive_1322
+ ,test_hive_1318 as test_hive_1318
+ ,test_hive_1323 as test_hive_1323
+ ,test_hive_335 as test_hive_335
+ ,test_hive_1321 as test_hive_1321
+ ,test_hive_1320 as test_hive_1320
+ ,test_hive_1319 as test_hive_1319
+ ,test_hive_1326 as test_hive_1326
+ ,test_hive_1325 as test_hive_1325
+ ,test_hive_1324 as test_hive_1324
+ ,test_hive_1327 as test_hive_1327
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1333 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1331
+POSTHOOK: Input: default@test_hive_1333
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1332
+POSTHOOK: Lineage: test_hive_1332.creation_date EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1332.ds EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1332.ds_ts SIMPLE [(test_hive_1331)test_hive_1331.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1332.source_file_name SIMPLE [(test_hive_1331)test_hive_1331.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1332.test_hive_1318 EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1318, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1332.test_hive_1319 EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1319, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1332.test_hive_1320 EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1320, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1332.test_hive_1321 SIMPLE [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1321, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1332.test_hive_1322 EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1322, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1332.test_hive_1323 EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1323, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1332.test_hive_1324 SIMPLE [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1324, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1332.test_hive_1325 SIMPLE [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1325, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1332.test_hive_1326 SIMPLE [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1326, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1332.test_hive_1327 EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1327, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1332.test_hive_335 SIMPLE [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_335, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1332.ts EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1329
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1329
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1329
+as
+select t1.*
+from test_hive_1332 t1
+inner join test_hive_1330 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1330
+PREHOOK: Input: default@test_hive_1331
+PREHOOK: Input: default@test_hive_1332
+PREHOOK: Input: default@test_hive_1333
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1329
+POSTHOOK: query: create view test_hive_1329
+as
+select t1.*
+from test_hive_1332 t1
+inner join test_hive_1330 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1330
+POSTHOOK: Input: default@test_hive_1331
+POSTHOOK: Input: default@test_hive_1332
+POSTHOOK: Input: default@test_hive_1333
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1329
+POSTHOOK: Lineage: test_hive_1329.creation_date EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1329.ds EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1329.ds_ts SIMPLE [(test_hive_1331)test_hive_1331.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1329.source_file_name SIMPLE [(test_hive_1331)test_hive_1331.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1329.test_hive_1318 EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1318, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1329.test_hive_1319 EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1319, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1329.test_hive_1320 EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1320, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1329.test_hive_1321 SIMPLE [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1321, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1329.test_hive_1322 EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1322, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1329.test_hive_1323 EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1323, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1329.test_hive_1324 SIMPLE [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1324, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1329.test_hive_1325 SIMPLE [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1325, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1329.test_hive_1326 SIMPLE [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1326, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1329.test_hive_1327 EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_1327, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1329.test_hive_335 SIMPLE [(test_hive_1331)test_hive_1331.FieldSchema(name:test_hive_335, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1329.ts EXPRESSION [(test_hive_1331)test_hive_1331.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1276 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1276 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1276
+(
+ test_hive_1272 string
+ ,test_hive_1270 string
+ ,test_hive_1273 string
+ ,test_hive_308 string
+ ,test_hive_1271 string
+ ,test_hive_1274 string
+ ,test_hive_1275 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1276
+POSTHOOK: query: create table test_hive_1276
+(
+ test_hive_1272 string
+ ,test_hive_1270 string
+ ,test_hive_1273 string
+ ,test_hive_308 string
+ ,test_hive_1271 string
+ ,test_hive_1274 string
+ ,test_hive_1275 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1276
+PREHOOK: query: create table if not exists test_hive_1279
+(
+ test_hive_1272 string
+ ,test_hive_1270 string
+ ,test_hive_1273 string
+ ,test_hive_308 string
+ ,test_hive_1271 string
+ ,test_hive_1274 string
+ ,test_hive_1275 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1279
+POSTHOOK: query: create table if not exists test_hive_1279
+(
+ test_hive_1272 string
+ ,test_hive_1270 string
+ ,test_hive_1273 string
+ ,test_hive_308 string
+ ,test_hive_1271 string
+ ,test_hive_1274 string
+ ,test_hive_1275 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1279
+PREHOOK: query: drop table if exists test_hive_1278 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1278 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1278
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1278
+POSTHOOK: query: create table if not exists test_hive_1278
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1278
+PREHOOK: query: drop view if exists test_hive_1281
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1281
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view if not exists test_hive_1281
+as
+select
+ cast(test_hive_1272 as int) as test_hive_1272
+ ,cast(test_hive_1270 as int) as test_hive_1270
+ ,cast(test_hive_1273 as int) as test_hive_1273
+ ,cast(test_hive_308 as string) as test_hive_308
+ ,cast(test_hive_1271 as string) as test_hive_1271
+ ,cast(test_hive_1274 as string) as test_hive_1274
+ ,cast(from_unixtime(unix_timestamp(test_hive_1275,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1275
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1279
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1279
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1281
+POSTHOOK: query: create view if not exists test_hive_1281
+as
+select
+ cast(test_hive_1272 as int) as test_hive_1272
+ ,cast(test_hive_1270 as int) as test_hive_1270
+ ,cast(test_hive_1273 as int) as test_hive_1273
+ ,cast(test_hive_308 as string) as test_hive_308
+ ,cast(test_hive_1271 as string) as test_hive_1271
+ ,cast(test_hive_1274 as string) as test_hive_1274
+ ,cast(from_unixtime(unix_timestamp(test_hive_1275,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1275
+ ,source_file_name
+ ,cast(creation_date as timestamp) as creation_date
+ ,ds_ts
+ ,cast(ds as bigint) as ds
+ ,cast(ts as bigint) as ts
+from test_hive_1279
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1279
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1281
+POSTHOOK: Lineage: test_hive_1281.creation_date EXPRESSION [(test_hive_1279)test_hive_1279.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1281.ds EXPRESSION [(test_hive_1279)test_hive_1279.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1281.ds_ts SIMPLE [(test_hive_1279)test_hive_1279.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1281.source_file_name SIMPLE [(test_hive_1279)test_hive_1279.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1281.test_hive_1270 EXPRESSION [(test_hive_1279)test_hive_1279.FieldSchema(name:test_hive_1270, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1281.test_hive_1271 SIMPLE [(test_hive_1279)test_hive_1279.FieldSchema(name:test_hive_1271, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1281.test_hive_1272 EXPRESSION [(test_hive_1279)test_hive_1279.FieldSchema(name:test_hive_1272, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1281.test_hive_1273 EXPRESSION [(test_hive_1279)test_hive_1279.FieldSchema(name:test_hive_1273, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1281.test_hive_1274 SIMPLE [(test_hive_1279)test_hive_1279.FieldSchema(name:test_hive_1274, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1281.test_hive_1275 EXPRESSION [(test_hive_1279)test_hive_1279.FieldSchema(name:test_hive_1275, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1281.test_hive_308 SIMPLE [(test_hive_1279)test_hive_1279.FieldSchema(name:test_hive_308, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1281.ts EXPRESSION [(test_hive_1279)test_hive_1279.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1280
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1280
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1280
+as
+select
+ test_hive_1272 as test_hive_1272
+ ,test_hive_1270 as test_hive_1270
+ ,test_hive_1273 as test_hive_1273
+ ,test_hive_308 as test_hive_308
+ ,test_hive_1271 as test_hive_1271
+ ,test_hive_1274 as test_hive_1274
+ ,test_hive_1275 as test_hive_1275
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1281 t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1279
+PREHOOK: Input: default@test_hive_1281
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1280
+POSTHOOK: query: create view test_hive_1280
+as
+select
+ test_hive_1272 as test_hive_1272
+ ,test_hive_1270 as test_hive_1270
+ ,test_hive_1273 as test_hive_1273
+ ,test_hive_308 as test_hive_308
+ ,test_hive_1271 as test_hive_1271
+ ,test_hive_1274 as test_hive_1274
+ ,test_hive_1275 as test_hive_1275
+ ,source_file_name
+ ,creation_date
+ ,ds_ts
+ ,ts
+ ,ds
+from test_hive_1281 t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1279
+POSTHOOK: Input: default@test_hive_1281
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1280
+POSTHOOK: Lineage: test_hive_1280.creation_date EXPRESSION [(test_hive_1279)test_hive_1279.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1280.ds EXPRESSION [(test_hive_1279)test_hive_1279.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1280.ds_ts SIMPLE [(test_hive_1279)test_hive_1279.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1280.source_file_name SIMPLE [(test_hive_1279)test_hive_1279.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1280.test_hive_1270 EXPRESSION [(test_hive_1279)test_hive_1279.FieldSchema(name:test_hive_1270, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1280.test_hive_1271 SIMPLE [(test_hive_1279)test_hive_1279.FieldSchema(name:test_hive_1271, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1280.test_hive_1272 EXPRESSION [(test_hive_1279)test_hive_1279.FieldSchema(name:test_hive_1272, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1280.test_hive_1273 EXPRESSION [(test_hive_1279)test_hive_1279.FieldSchema(name:test_hive_1273, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1280.test_hive_1274 SIMPLE [(test_hive_1279)test_hive_1279.FieldSchema(name:test_hive_1274, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1280.test_hive_1275 EXPRESSION [(test_hive_1279)test_hive_1279.FieldSchema(name:test_hive_1275, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1280.test_hive_308 SIMPLE [(test_hive_1279)test_hive_1279.FieldSchema(name:test_hive_308, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1280.ts EXPRESSION [(test_hive_1279)test_hive_1279.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_1277
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_1277
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_1277
+as
+select t1.*
+from test_hive_1280 t1
+inner join test_hive_1278 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_1278
+PREHOOK: Input: default@test_hive_1279
+PREHOOK: Input: default@test_hive_1280
+PREHOOK: Input: default@test_hive_1281
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1277
+POSTHOOK: query: create view test_hive_1277
+as
+select t1.*
+from test_hive_1280 t1
+inner join test_hive_1278 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_1278
+POSTHOOK: Input: default@test_hive_1279
+POSTHOOK: Input: default@test_hive_1280
+POSTHOOK: Input: default@test_hive_1281
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1277
+POSTHOOK: Lineage: test_hive_1277.creation_date EXPRESSION [(test_hive_1279)test_hive_1279.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1277.ds EXPRESSION [(test_hive_1279)test_hive_1279.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_1277.ds_ts SIMPLE [(test_hive_1279)test_hive_1279.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_1277.source_file_name SIMPLE [(test_hive_1279)test_hive_1279.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1277.test_hive_1270 EXPRESSION [(test_hive_1279)test_hive_1279.FieldSchema(name:test_hive_1270, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1277.test_hive_1271 SIMPLE [(test_hive_1279)test_hive_1279.FieldSchema(name:test_hive_1271, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1277.test_hive_1272 EXPRESSION [(test_hive_1279)test_hive_1279.FieldSchema(name:test_hive_1272, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1277.test_hive_1273 EXPRESSION [(test_hive_1279)test_hive_1279.FieldSchema(name:test_hive_1273, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1277.test_hive_1274 SIMPLE [(test_hive_1279)test_hive_1279.FieldSchema(name:test_hive_1274, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1277.test_hive_1275 EXPRESSION [(test_hive_1279)test_hive_1279.FieldSchema(name:test_hive_1275, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1277.test_hive_308 SIMPLE [(test_hive_1279)test_hive_1279.FieldSchema(name:test_hive_308, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_1277.ts EXPRESSION [(test_hive_1279)test_hive_1279.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop table if exists test_hive_1264 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1264 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table test_hive_1264
+(
+ test_hive_1258 string
+ ,test_hive_1256 string
+ ,test_hive_1259 string
+ ,test_hive_307 string
+ ,test_hive_306 string
+ ,test_hive_1257 string
+ ,test_hive_1262 string
+ ,test_hive_1261 string
+ ,test_hive_1260 string
+ ,test_hive_1263 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1264
+POSTHOOK: query: create table test_hive_1264
+(
+ test_hive_1258 string
+ ,test_hive_1256 string
+ ,test_hive_1259 string
+ ,test_hive_307 string
+ ,test_hive_306 string
+ ,test_hive_1257 string
+ ,test_hive_1262 string
+ ,test_hive_1261 string
+ ,test_hive_1260 string
+ ,test_hive_1263 string
+)
+partitioned by (ds int, ts int)
+row format delimited fields terminated by '31'
+tblproperties('serialization.null.format' = '')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1264
+PREHOOK: query: create table if not exists test_hive_1267
+(
+ test_hive_1258 string
+ ,test_hive_1256 string
+ ,test_hive_1259 string
+ ,test_hive_307 string
+ ,test_hive_306 string
+ ,test_hive_1257 string
+ ,test_hive_1262 string
+ ,test_hive_1261 string
+ ,test_hive_1260 string
+ ,test_hive_1263 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1267
+POSTHOOK: query: create table if not exists test_hive_1267
+(
+ test_hive_1258 string
+ ,test_hive_1256 string
+ ,test_hive_1259 string
+ ,test_hive_307 string
+ ,test_hive_306 string
+ ,test_hive_1257 string
+ ,test_hive_1262 string
+ ,test_hive_1261 string
+ ,test_hive_1260 string
+ ,test_hive_1263 string
+ ,source_file_name string
+ ,creation_date string
+ ,ds_ts bigint
+ ,ts int
+)
+partitioned by (ds int)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1267
+PREHOOK: query: drop table if exists test_hive_1266 purge
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists test_hive_1266 purge
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table if not exists test_hive_1266
+(
+max_partition bigint
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_1266
+POSTHOOK: query: create table if not exists test_hive_1266
+(
+max_partition bigint
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_1266
+PREHOOK: query: drop view if exists test_hive_1269
+PREHOOK: type:
DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1269 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1269 +as +select + cast(test_hive_1258 as int) as test_hive_1258 + ,cast(test_hive_1256 as int) as test_hive_1256 + ,cast(test_hive_1259 as int) as test_hive_1259 + ,cast(test_hive_307 as string) as test_hive_307 + ,cast(test_hive_306 as string) as test_hive_306 + ,cast(test_hive_1257 as string) as test_hive_1257 + ,cast(test_hive_1262 as string) as test_hive_1262 + ,cast(test_hive_1261 as string) as test_hive_1261 + ,cast(test_hive_1260 as string) as test_hive_1260 + ,cast(from_unixtime(unix_timestamp(test_hive_1263,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1263 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1267 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1267 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1269 +POSTHOOK: query: create view if not exists test_hive_1269 +as +select + cast(test_hive_1258 as int) as test_hive_1258 + ,cast(test_hive_1256 as int) as test_hive_1256 + ,cast(test_hive_1259 as int) as test_hive_1259 + ,cast(test_hive_307 as string) as test_hive_307 + ,cast(test_hive_306 as string) as test_hive_306 + ,cast(test_hive_1257 as string) as test_hive_1257 + ,cast(test_hive_1262 as string) as test_hive_1262 + ,cast(test_hive_1261 as string) as test_hive_1261 + ,cast(test_hive_1260 as string) as test_hive_1260 + ,cast(from_unixtime(unix_timestamp(test_hive_1263,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1263 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1267 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1267 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1269 +POSTHOOK: Lineage: test_hive_1269.creation_date EXPRESSION [(test_hive_1267)test_hive_1267.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1269.ds EXPRESSION [(test_hive_1267)test_hive_1267.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1269.ds_ts SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1269.source_file_name SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1269.test_hive_1256 EXPRESSION [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1256, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1269.test_hive_1257 SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1257, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1269.test_hive_1258 EXPRESSION [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1258, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1269.test_hive_1259 EXPRESSION [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1259, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1269.test_hive_1260 SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1260, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1269.test_hive_1261 SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1261, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1269.test_hive_1262 SIMPLE 
[(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1262, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1269.test_hive_1263 EXPRESSION [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1263, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1269.test_hive_306 SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_306, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1269.test_hive_307 SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_307, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1269.ts EXPRESSION [(test_hive_1267)test_hive_1267.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1268 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1268 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1268 +as +select + test_hive_1258 as test_hive_1258 + ,test_hive_1256 as test_hive_1256 + ,test_hive_1259 as test_hive_1259 + ,test_hive_307 as test_hive_307 + ,test_hive_306 as test_hive_306 + ,test_hive_1257 as test_hive_1257 + ,test_hive_1262 as test_hive_1262 + ,test_hive_1261 as test_hive_1261 + ,test_hive_1260 as test_hive_1260 + ,test_hive_1263 as test_hive_1263 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1269 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1267 +PREHOOK: Input: default@test_hive_1269 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1268 +POSTHOOK: query: create view test_hive_1268 +as +select + test_hive_1258 as test_hive_1258 + ,test_hive_1256 as test_hive_1256 + ,test_hive_1259 as test_hive_1259 + ,test_hive_307 as test_hive_307 + ,test_hive_306 as test_hive_306 + ,test_hive_1257 as test_hive_1257 + ,test_hive_1262 as test_hive_1262 + ,test_hive_1261 as test_hive_1261 + ,test_hive_1260 as test_hive_1260 + ,test_hive_1263 as test_hive_1263 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1269 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1267 +POSTHOOK: Input: default@test_hive_1269 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1268 +POSTHOOK: Lineage: test_hive_1268.creation_date EXPRESSION [(test_hive_1267)test_hive_1267.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1268.ds EXPRESSION [(test_hive_1267)test_hive_1267.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1268.ds_ts SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1268.source_file_name SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1268.test_hive_1256 EXPRESSION [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1256, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1268.test_hive_1257 SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1257, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1268.test_hive_1258 EXPRESSION [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1258, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1268.test_hive_1259 EXPRESSION [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1259, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1268.test_hive_1260 SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1260, type:string, comment:null), ] +POSTHOOK: 
Lineage: test_hive_1268.test_hive_1261 SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1261, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1268.test_hive_1262 SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1262, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1268.test_hive_1263 EXPRESSION [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1263, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1268.test_hive_306 SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_306, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1268.test_hive_307 SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_307, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1268.ts EXPRESSION [(test_hive_1267)test_hive_1267.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1265 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1265 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1265 +as +select t1.* +from test_hive_1268 t1 +inner join test_hive_1266 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1266 +PREHOOK: Input: default@test_hive_1267 +PREHOOK: Input: default@test_hive_1268 +PREHOOK: Input: default@test_hive_1269 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1265 +POSTHOOK: query: create view test_hive_1265 +as +select t1.* +from test_hive_1268 t1 +inner join test_hive_1266 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1266 +POSTHOOK: Input: default@test_hive_1267 +POSTHOOK: Input: default@test_hive_1268 +POSTHOOK: Input: default@test_hive_1269 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1265 +POSTHOOK: Lineage: test_hive_1265.creation_date EXPRESSION [(test_hive_1267)test_hive_1267.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1265.ds EXPRESSION [(test_hive_1267)test_hive_1267.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1265.ds_ts SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1265.source_file_name SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1265.test_hive_1256 EXPRESSION [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1256, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1265.test_hive_1257 SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1257, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1265.test_hive_1258 EXPRESSION [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1258, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1265.test_hive_1259 EXPRESSION [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1259, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1265.test_hive_1260 SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1260, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1265.test_hive_1261 SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1261, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1265.test_hive_1262 SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1262, type:string, comment:null), ] +POSTHOOK: Lineage: 
test_hive_1265.test_hive_1263 EXPRESSION [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_1263, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1265.test_hive_306 SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_306, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1265.test_hive_307 SIMPLE [(test_hive_1267)test_hive_1267.FieldSchema(name:test_hive_307, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1265.ts EXPRESSION [(test_hive_1267)test_hive_1267.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1226 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1226 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1226 +( + test_hive_1222 string + ,test_hive_1220 string + ,test_hive_1223 string + ,test_hive_280 string + ,test_hive_1221 string + ,test_hive_1224 string + ,test_hive_1225 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1226 +POSTHOOK: query: create table test_hive_1226 +( + test_hive_1222 string + ,test_hive_1220 string + ,test_hive_1223 string + ,test_hive_280 string + ,test_hive_1221 string + ,test_hive_1224 string + ,test_hive_1225 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1226 +PREHOOK: query: create table if not exists test_hive_1229 +( + test_hive_1222 string + ,test_hive_1220 string + ,test_hive_1223 string + ,test_hive_280 string + ,test_hive_1221 string + ,test_hive_1224 string + ,test_hive_1225 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1229 +POSTHOOK: query: create table if not exists test_hive_1229 +( + test_hive_1222 string + ,test_hive_1220 string + ,test_hive_1223 string + ,test_hive_280 string + ,test_hive_1221 string + ,test_hive_1224 string + ,test_hive_1225 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1229 +PREHOOK: query: drop table if exists test_hive_1228 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1228 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1228 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1228 +POSTHOOK: query: create table if not exists test_hive_1228 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1228 +PREHOOK: query: drop view if exists test_hive_1231 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1231 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1231 +as +select + cast(test_hive_1222 as int) as test_hive_1222 + ,cast(test_hive_1220 as int) as test_hive_1220 + ,cast(test_hive_1223 as int) as test_hive_1223 + ,cast(test_hive_280 as string) as 
test_hive_280 + ,cast(test_hive_1221 as string) as test_hive_1221 + ,cast(test_hive_1224 as string) as test_hive_1224 + ,cast(from_unixtime(unix_timestamp(test_hive_1225,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1225 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1229 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1229 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1231 +POSTHOOK: query: create view if not exists test_hive_1231 +as +select + cast(test_hive_1222 as int) as test_hive_1222 + ,cast(test_hive_1220 as int) as test_hive_1220 + ,cast(test_hive_1223 as int) as test_hive_1223 + ,cast(test_hive_280 as string) as test_hive_280 + ,cast(test_hive_1221 as string) as test_hive_1221 + ,cast(test_hive_1224 as string) as test_hive_1224 + ,cast(from_unixtime(unix_timestamp(test_hive_1225,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1225 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1229 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1229 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1231 +POSTHOOK: Lineage: test_hive_1231.creation_date EXPRESSION [(test_hive_1229)test_hive_1229.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1231.ds EXPRESSION [(test_hive_1229)test_hive_1229.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1231.ds_ts SIMPLE [(test_hive_1229)test_hive_1229.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1231.source_file_name SIMPLE [(test_hive_1229)test_hive_1229.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1231.test_hive_1220 EXPRESSION [(test_hive_1229)test_hive_1229.FieldSchema(name:test_hive_1220, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1231.test_hive_1221 SIMPLE [(test_hive_1229)test_hive_1229.FieldSchema(name:test_hive_1221, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1231.test_hive_1222 EXPRESSION [(test_hive_1229)test_hive_1229.FieldSchema(name:test_hive_1222, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1231.test_hive_1223 EXPRESSION [(test_hive_1229)test_hive_1229.FieldSchema(name:test_hive_1223, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1231.test_hive_1224 SIMPLE [(test_hive_1229)test_hive_1229.FieldSchema(name:test_hive_1224, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1231.test_hive_1225 EXPRESSION [(test_hive_1229)test_hive_1229.FieldSchema(name:test_hive_1225, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1231.test_hive_280 SIMPLE [(test_hive_1229)test_hive_1229.FieldSchema(name:test_hive_280, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1231.ts EXPRESSION [(test_hive_1229)test_hive_1229.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1230 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1230 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1230 +as +select + test_hive_1222 as test_hive_1222 + ,test_hive_1220 as test_hive_1220 + ,test_hive_1223 as test_hive_1223 + ,test_hive_280 as test_hive_280 + ,test_hive_1221 as test_hive_1221 + ,test_hive_1224 as test_hive_1224 
+ ,test_hive_1225 as test_hive_1225 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1231 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1229 +PREHOOK: Input: default@test_hive_1231 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1230 +POSTHOOK: query: create view test_hive_1230 +as +select + test_hive_1222 as test_hive_1222 + ,test_hive_1220 as test_hive_1220 + ,test_hive_1223 as test_hive_1223 + ,test_hive_280 as test_hive_280 + ,test_hive_1221 as test_hive_1221 + ,test_hive_1224 as test_hive_1224 + ,test_hive_1225 as test_hive_1225 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1231 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1229 +POSTHOOK: Input: default@test_hive_1231 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1230 +POSTHOOK: Lineage: test_hive_1230.creation_date EXPRESSION [(test_hive_1229)test_hive_1229.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1230.ds EXPRESSION [(test_hive_1229)test_hive_1229.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1230.ds_ts SIMPLE [(test_hive_1229)test_hive_1229.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1230.source_file_name SIMPLE [(test_hive_1229)test_hive_1229.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1230.test_hive_1220 EXPRESSION [(test_hive_1229)test_hive_1229.FieldSchema(name:test_hive_1220, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1230.test_hive_1221 SIMPLE [(test_hive_1229)test_hive_1229.FieldSchema(name:test_hive_1221, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1230.test_hive_1222 EXPRESSION [(test_hive_1229)test_hive_1229.FieldSchema(name:test_hive_1222, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1230.test_hive_1223 EXPRESSION [(test_hive_1229)test_hive_1229.FieldSchema(name:test_hive_1223, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1230.test_hive_1224 SIMPLE [(test_hive_1229)test_hive_1229.FieldSchema(name:test_hive_1224, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1230.test_hive_1225 EXPRESSION [(test_hive_1229)test_hive_1229.FieldSchema(name:test_hive_1225, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1230.test_hive_280 SIMPLE [(test_hive_1229)test_hive_1229.FieldSchema(name:test_hive_280, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1230.ts EXPRESSION [(test_hive_1229)test_hive_1229.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1227 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1227 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1227 +as +select t1.* +from test_hive_1230 t1 +inner join test_hive_1228 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1228 +PREHOOK: Input: default@test_hive_1229 +PREHOOK: Input: default@test_hive_1230 +PREHOOK: Input: default@test_hive_1231 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1227 +POSTHOOK: query: create view test_hive_1227 +as +select t1.* +from test_hive_1230 t1 +inner join test_hive_1228 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1228 +POSTHOOK: Input: default@test_hive_1229 +POSTHOOK: Input: default@test_hive_1230 +POSTHOOK: Input: 
default@test_hive_1231 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1227 +POSTHOOK: Lineage: test_hive_1227.creation_date EXPRESSION [(test_hive_1229)test_hive_1229.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1227.ds EXPRESSION [(test_hive_1229)test_hive_1229.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1227.ds_ts SIMPLE [(test_hive_1229)test_hive_1229.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1227.source_file_name SIMPLE [(test_hive_1229)test_hive_1229.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1227.test_hive_1220 EXPRESSION [(test_hive_1229)test_hive_1229.FieldSchema(name:test_hive_1220, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1227.test_hive_1221 SIMPLE [(test_hive_1229)test_hive_1229.FieldSchema(name:test_hive_1221, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1227.test_hive_1222 EXPRESSION [(test_hive_1229)test_hive_1229.FieldSchema(name:test_hive_1222, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1227.test_hive_1223 EXPRESSION [(test_hive_1229)test_hive_1229.FieldSchema(name:test_hive_1223, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1227.test_hive_1224 SIMPLE [(test_hive_1229)test_hive_1229.FieldSchema(name:test_hive_1224, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1227.test_hive_1225 EXPRESSION [(test_hive_1229)test_hive_1229.FieldSchema(name:test_hive_1225, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1227.test_hive_280 SIMPLE [(test_hive_1229)test_hive_1229.FieldSchema(name:test_hive_280, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1227.ts EXPRESSION [(test_hive_1229)test_hive_1229.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_1214 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1214 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_1214 +( + test_hive_1210 string + ,test_hive_1208 string + ,test_hive_1211 string + ,test_hive_279 string + ,test_hive_1209 string + ,test_hive_1212 string + ,test_hive_1213 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1214 +POSTHOOK: query: create table test_hive_1214 +( + test_hive_1210 string + ,test_hive_1208 string + ,test_hive_1211 string + ,test_hive_279 string + ,test_hive_1209 string + ,test_hive_1212 string + ,test_hive_1213 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1214 +PREHOOK: query: create table if not exists test_hive_1217 +( + test_hive_1210 string + ,test_hive_1208 string + ,test_hive_1211 string + ,test_hive_279 string + ,test_hive_1209 string + ,test_hive_1212 string + ,test_hive_1213 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1217 +POSTHOOK: query: create table if not exists test_hive_1217 +( + test_hive_1210 string + ,test_hive_1208 string + 
,test_hive_1211 string + ,test_hive_279 string + ,test_hive_1209 string + ,test_hive_1212 string + ,test_hive_1213 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1217 +PREHOOK: query: drop table if exists test_hive_1216 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_1216 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists test_hive_1216 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1216 +POSTHOOK: query: create table if not exists test_hive_1216 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1216 +PREHOOK: query: drop view if exists test_hive_1219 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1219 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_1219 +as +select + cast(test_hive_1210 as int) as test_hive_1210 + ,cast(test_hive_1208 as int) as test_hive_1208 + ,cast(test_hive_1211 as int) as test_hive_1211 + ,cast(test_hive_279 as string) as test_hive_279 + ,cast(test_hive_1209 as string) as test_hive_1209 + ,cast(test_hive_1212 as string) as test_hive_1212 + ,cast(from_unixtime(unix_timestamp(test_hive_1213,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1213 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1217 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1217 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1219 +POSTHOOK: query: create view if not exists test_hive_1219 +as +select + cast(test_hive_1210 as int) as test_hive_1210 + ,cast(test_hive_1208 as int) as test_hive_1208 + ,cast(test_hive_1211 as int) as test_hive_1211 + ,cast(test_hive_279 as string) as test_hive_279 + ,cast(test_hive_1209 as string) as test_hive_1209 + ,cast(test_hive_1212 as string) as test_hive_1212 + ,cast(from_unixtime(unix_timestamp(test_hive_1213,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_1213 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_1217 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1217 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1219 +POSTHOOK: Lineage: test_hive_1219.creation_date EXPRESSION [(test_hive_1217)test_hive_1217.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1219.ds EXPRESSION [(test_hive_1217)test_hive_1217.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1219.ds_ts SIMPLE [(test_hive_1217)test_hive_1217.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1219.source_file_name SIMPLE [(test_hive_1217)test_hive_1217.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1219.test_hive_1208 EXPRESSION [(test_hive_1217)test_hive_1217.FieldSchema(name:test_hive_1208, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1219.test_hive_1209 SIMPLE [(test_hive_1217)test_hive_1217.FieldSchema(name:test_hive_1209, type:string, comment:null), ] 
+POSTHOOK: Lineage: test_hive_1219.test_hive_1210 EXPRESSION [(test_hive_1217)test_hive_1217.FieldSchema(name:test_hive_1210, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1219.test_hive_1211 EXPRESSION [(test_hive_1217)test_hive_1217.FieldSchema(name:test_hive_1211, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1219.test_hive_1212 SIMPLE [(test_hive_1217)test_hive_1217.FieldSchema(name:test_hive_1212, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1219.test_hive_1213 EXPRESSION [(test_hive_1217)test_hive_1217.FieldSchema(name:test_hive_1213, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1219.test_hive_279 SIMPLE [(test_hive_1217)test_hive_1217.FieldSchema(name:test_hive_279, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1219.ts EXPRESSION [(test_hive_1217)test_hive_1217.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1218 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1218 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1218 +as +select + test_hive_1210 as test_hive_1210 + ,test_hive_1208 as test_hive_1208 + ,test_hive_1211 as test_hive_1211 + ,test_hive_279 as test_hive_279 + ,test_hive_1209 as test_hive_1209 + ,test_hive_1212 as test_hive_1212 + ,test_hive_1213 as test_hive_1213 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1219 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1217 +PREHOOK: Input: default@test_hive_1219 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1218 +POSTHOOK: query: create view test_hive_1218 +as +select + test_hive_1210 as test_hive_1210 + ,test_hive_1208 as test_hive_1208 + ,test_hive_1211 as test_hive_1211 + ,test_hive_279 as test_hive_279 + ,test_hive_1209 as test_hive_1209 + ,test_hive_1212 as test_hive_1212 + ,test_hive_1213 as test_hive_1213 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_1219 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1217 +POSTHOOK: Input: default@test_hive_1219 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1218 +POSTHOOK: Lineage: test_hive_1218.creation_date EXPRESSION [(test_hive_1217)test_hive_1217.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1218.ds EXPRESSION [(test_hive_1217)test_hive_1217.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1218.ds_ts SIMPLE [(test_hive_1217)test_hive_1217.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1218.source_file_name SIMPLE [(test_hive_1217)test_hive_1217.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1218.test_hive_1208 EXPRESSION [(test_hive_1217)test_hive_1217.FieldSchema(name:test_hive_1208, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1218.test_hive_1209 SIMPLE [(test_hive_1217)test_hive_1217.FieldSchema(name:test_hive_1209, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1218.test_hive_1210 EXPRESSION [(test_hive_1217)test_hive_1217.FieldSchema(name:test_hive_1210, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1218.test_hive_1211 EXPRESSION [(test_hive_1217)test_hive_1217.FieldSchema(name:test_hive_1211, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1218.test_hive_1212 SIMPLE [(test_hive_1217)test_hive_1217.FieldSchema(name:test_hive_1212, type:string, comment:null), ] 
+POSTHOOK: Lineage: test_hive_1218.test_hive_1213 EXPRESSION [(test_hive_1217)test_hive_1217.FieldSchema(name:test_hive_1213, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1218.test_hive_279 SIMPLE [(test_hive_1217)test_hive_1217.FieldSchema(name:test_hive_279, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1218.ts EXPRESSION [(test_hive_1217)test_hive_1217.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_1215 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_1215 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_1215 +as +select t1.* +from test_hive_1218 t1 +inner join test_hive_1216 t2 on +t1.ds_ts = t2.max_partition +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_1216 +PREHOOK: Input: default@test_hive_1217 +PREHOOK: Input: default@test_hive_1218 +PREHOOK: Input: default@test_hive_1219 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_1215 +POSTHOOK: query: create view test_hive_1215 +as +select t1.* +from test_hive_1218 t1 +inner join test_hive_1216 t2 on +t1.ds_ts = t2.max_partition +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_1216 +POSTHOOK: Input: default@test_hive_1217 +POSTHOOK: Input: default@test_hive_1218 +POSTHOOK: Input: default@test_hive_1219 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_1215 +POSTHOOK: Lineage: test_hive_1215.creation_date EXPRESSION [(test_hive_1217)test_hive_1217.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1215.ds EXPRESSION [(test_hive_1217)test_hive_1217.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_1215.ds_ts SIMPLE [(test_hive_1217)test_hive_1217.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_1215.source_file_name SIMPLE [(test_hive_1217)test_hive_1217.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1215.test_hive_1208 EXPRESSION [(test_hive_1217)test_hive_1217.FieldSchema(name:test_hive_1208, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1215.test_hive_1209 SIMPLE [(test_hive_1217)test_hive_1217.FieldSchema(name:test_hive_1209, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1215.test_hive_1210 EXPRESSION [(test_hive_1217)test_hive_1217.FieldSchema(name:test_hive_1210, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1215.test_hive_1211 EXPRESSION [(test_hive_1217)test_hive_1217.FieldSchema(name:test_hive_1211, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1215.test_hive_1212 SIMPLE [(test_hive_1217)test_hive_1217.FieldSchema(name:test_hive_1212, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1215.test_hive_1213 EXPRESSION [(test_hive_1217)test_hive_1217.FieldSchema(name:test_hive_1213, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1215.test_hive_279 SIMPLE [(test_hive_1217)test_hive_1217.FieldSchema(name:test_hive_279, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_1215.ts EXPRESSION [(test_hive_1217)test_hive_1217.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop table if exists test_hive_2046 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_2046 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table test_hive_2046 +( + test_hive_2043 string + ,test_hive_2034 string + ,test_hive_2044 string + ,test_hive_2033 string + ,test_hive_459 string + 
,test_hive_460 string + ,test_hive_461 string + ,test_hive_462 string + ,test_hive_463 string + ,test_hive_464 string + ,test_hive_465 string + ,test_hive_2035 string + ,test_hive_2036 string + ,test_hive_2037 string + ,test_hive_2038 string + ,test_hive_2039 string + ,test_hive_2040 string + ,test_hive_2041 string + ,test_hive_2042 string + ,test_hive_467 string + ,test_hive_468 string + ,test_hive_469 string + ,test_hive_466 string + ,test_hive_2045 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_2046 +POSTHOOK: query: create table test_hive_2046 +( + test_hive_2043 string + ,test_hive_2034 string + ,test_hive_2044 string + ,test_hive_2033 string + ,test_hive_459 string + ,test_hive_460 string + ,test_hive_461 string + ,test_hive_462 string + ,test_hive_463 string + ,test_hive_464 string + ,test_hive_465 string + ,test_hive_2035 string + ,test_hive_2036 string + ,test_hive_2037 string + ,test_hive_2038 string + ,test_hive_2039 string + ,test_hive_2040 string + ,test_hive_2041 string + ,test_hive_2042 string + ,test_hive_467 string + ,test_hive_468 string + ,test_hive_469 string + ,test_hive_466 string + ,test_hive_2045 string +) +partitioned by (ds int, ts int) +row format delimited fields terminated by '31' +tblproperties('serialization.null.format' = '') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_2046 +PREHOOK: query: create table if not exists test_hive_2049 +( + test_hive_2043 string + ,test_hive_2034 string + ,test_hive_2044 string + ,test_hive_2033 string + ,test_hive_459 string + ,test_hive_460 string + ,test_hive_461 string + ,test_hive_462 string + ,test_hive_463 string + ,test_hive_464 string + ,test_hive_465 string + ,test_hive_2035 string + ,test_hive_2036 string + ,test_hive_2037 string + ,test_hive_2038 string + ,test_hive_2039 string + ,test_hive_2040 string + ,test_hive_2041 string + ,test_hive_2042 string + ,test_hive_467 string + ,test_hive_468 string + ,test_hive_469 string + ,test_hive_466 string + ,test_hive_2045 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_2049 +POSTHOOK: query: create table if not exists test_hive_2049 +( + test_hive_2043 string + ,test_hive_2034 string + ,test_hive_2044 string + ,test_hive_2033 string + ,test_hive_459 string + ,test_hive_460 string + ,test_hive_461 string + ,test_hive_462 string + ,test_hive_463 string + ,test_hive_464 string + ,test_hive_465 string + ,test_hive_2035 string + ,test_hive_2036 string + ,test_hive_2037 string + ,test_hive_2038 string + ,test_hive_2039 string + ,test_hive_2040 string + ,test_hive_2041 string + ,test_hive_2042 string + ,test_hive_467 string + ,test_hive_468 string + ,test_hive_469 string + ,test_hive_466 string + ,test_hive_2045 string + ,source_file_name string + ,creation_date string + ,ds_ts bigint + ,ts int +) +partitioned by (ds int) +stored as parquet +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_2049 +PREHOOK: query: drop table if exists test_hive_2048 purge +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists test_hive_2048 purge +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table if not exists 
test_hive_2048 +( +max_partition bigint +) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_2048 +POSTHOOK: query: create table if not exists test_hive_2048 +( +max_partition bigint +) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_2048 +PREHOOK: query: drop view if exists test_hive_2051 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_2051 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view if not exists test_hive_2051 +as +select + cast(test_hive_2043 as int) as test_hive_2043 + ,cast(test_hive_2034 as int) as test_hive_2034 + ,cast(test_hive_2044 as int) as test_hive_2044 + ,cast(test_hive_2033 as string) as test_hive_2033 + ,cast(test_hive_459 as string) as test_hive_459 + ,cast(test_hive_460 as string) as test_hive_460 + ,cast(test_hive_461 as string) as test_hive_461 + ,cast(test_hive_462 as string) as test_hive_462 + ,cast(test_hive_463 as string) as test_hive_463 + ,cast(test_hive_464 as string) as test_hive_464 + ,cast(test_hive_465 as string) as test_hive_465 + ,cast(test_hive_2035 as int) as test_hive_2035 + ,cast(test_hive_2036 as int) as test_hive_2036 + ,cast(test_hive_2037 as int) as test_hive_2037 + ,cast(test_hive_2038 as int) as test_hive_2038 + ,cast(test_hive_2039 as int) as test_hive_2039 + ,cast(test_hive_2040 as int) as test_hive_2040 + ,cast(test_hive_2041 as int) as test_hive_2041 + ,cast(test_hive_2042 as int) as test_hive_2042 + ,cast(test_hive_467 as string) as test_hive_467 + ,cast(test_hive_468 as string) as test_hive_468 + ,cast(test_hive_469 as string) as test_hive_469 + ,cast(test_hive_466 as string) as test_hive_466 + ,cast(from_unixtime(unix_timestamp(test_hive_2045,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_2045 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts + ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_2049 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_2049 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_2051 +POSTHOOK: query: create view if not exists test_hive_2051 +as +select + cast(test_hive_2043 as int) as test_hive_2043 + ,cast(test_hive_2034 as int) as test_hive_2034 + ,cast(test_hive_2044 as int) as test_hive_2044 + ,cast(test_hive_2033 as string) as test_hive_2033 + ,cast(test_hive_459 as string) as test_hive_459 + ,cast(test_hive_460 as string) as test_hive_460 + ,cast(test_hive_461 as string) as test_hive_461 + ,cast(test_hive_462 as string) as test_hive_462 + ,cast(test_hive_463 as string) as test_hive_463 + ,cast(test_hive_464 as string) as test_hive_464 + ,cast(test_hive_465 as string) as test_hive_465 + ,cast(test_hive_2035 as int) as test_hive_2035 + ,cast(test_hive_2036 as int) as test_hive_2036 + ,cast(test_hive_2037 as int) as test_hive_2037 + ,cast(test_hive_2038 as int) as test_hive_2038 + ,cast(test_hive_2039 as int) as test_hive_2039 + ,cast(test_hive_2040 as int) as test_hive_2040 + ,cast(test_hive_2041 as int) as test_hive_2041 + ,cast(test_hive_2042 as int) as test_hive_2042 + ,cast(test_hive_467 as string) as test_hive_467 + ,cast(test_hive_468 as string) as test_hive_468 + ,cast(test_hive_469 as string) as test_hive_469 + ,cast(test_hive_466 as string) as test_hive_466 + ,cast(from_unixtime(unix_timestamp(test_hive_2045,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') as timestamp) as test_hive_2045 + ,source_file_name + ,cast(creation_date as timestamp) as creation_date + ,ds_ts 
+ ,cast(ds as bigint) as ds + ,cast(ts as bigint) as ts +from test_hive_2049 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_2049 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_2051 +POSTHOOK: Lineage: test_hive_2051.creation_date EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.ds EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.ds_ts SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.source_file_name SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_2033 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2033, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_2034 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2034, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_2035 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2035, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_2036 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2036, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_2037 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2037, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_2038 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2038, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_2039 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2039, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_2040 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2040, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_2041 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2041, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_2042 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2042, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_2043 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2043, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_2044 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2044, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_2045 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2045, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_459 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_459, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_460 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_460, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_461 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_461, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_462 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_462, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_463 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_463, type:string, 
comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_464 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_464, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_465 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_465, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_466 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_466, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_467 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_467, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_468 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_468, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.test_hive_469 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_469, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2051.ts EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:ts, type:int, comment:null), ] +PREHOOK: query: drop view if exists test_hive_2050 +PREHOOK: type: DROPVIEW +POSTHOOK: query: drop view if exists test_hive_2050 +POSTHOOK: type: DROPVIEW +PREHOOK: query: create view test_hive_2050 +as +select + test_hive_2043 as test_hive_2043 + ,test_hive_2034 as test_hive_2034 + ,test_hive_2044 as test_hive_2044 + ,test_hive_2033 as test_hive_2033 + ,test_hive_459 as test_hive_459 + ,test_hive_460 as test_hive_460 + ,test_hive_461 as test_hive_461 + ,test_hive_462 as test_hive_462 + ,test_hive_463 as test_hive_463 + ,test_hive_464 as test_hive_464 + ,test_hive_465 as test_hive_465 + ,test_hive_2035 as test_hive_2035 + ,test_hive_2036 as test_hive_2036 + ,test_hive_2037 as test_hive_2037 + ,test_hive_2038 as test_hive_2038 + ,test_hive_2039 as test_hive_2039 + ,test_hive_2040 as test_hive_2040 + ,test_hive_2041 as test_hive_2041 + ,test_hive_2042 as test_hive_2042 + ,test_hive_467 as test_hive_467 + ,test_hive_468 as test_hive_468 + ,test_hive_469 as test_hive_469 + ,test_hive_466 as test_hive_466 + ,test_hive_2045 as test_hive_2045 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_2051 t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@test_hive_2049 +PREHOOK: Input: default@test_hive_2051 +PREHOOK: Output: database:default +PREHOOK: Output: default@test_hive_2050 +POSTHOOK: query: create view test_hive_2050 +as +select + test_hive_2043 as test_hive_2043 + ,test_hive_2034 as test_hive_2034 + ,test_hive_2044 as test_hive_2044 + ,test_hive_2033 as test_hive_2033 + ,test_hive_459 as test_hive_459 + ,test_hive_460 as test_hive_460 + ,test_hive_461 as test_hive_461 + ,test_hive_462 as test_hive_462 + ,test_hive_463 as test_hive_463 + ,test_hive_464 as test_hive_464 + ,test_hive_465 as test_hive_465 + ,test_hive_2035 as test_hive_2035 + ,test_hive_2036 as test_hive_2036 + ,test_hive_2037 as test_hive_2037 + ,test_hive_2038 as test_hive_2038 + ,test_hive_2039 as test_hive_2039 + ,test_hive_2040 as test_hive_2040 + ,test_hive_2041 as test_hive_2041 + ,test_hive_2042 as test_hive_2042 + ,test_hive_467 as test_hive_467 + ,test_hive_468 as test_hive_468 + ,test_hive_469 as test_hive_469 + ,test_hive_466 as test_hive_466 + ,test_hive_2045 as test_hive_2045 + ,source_file_name + ,creation_date + ,ds_ts + ,ts + ,ds +from test_hive_2051 t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@test_hive_2049 +POSTHOOK: Input: default@test_hive_2051 +POSTHOOK: Output: database:default +POSTHOOK: Output: default@test_hive_2050 +POSTHOOK: Lineage: 
test_hive_2050.creation_date EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:creation_date, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.ds EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:ds, type:int, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.ds_ts SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:ds_ts, type:bigint, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.source_file_name SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:source_file_name, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.test_hive_2033 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2033, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.test_hive_2034 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2034, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.test_hive_2035 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2035, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.test_hive_2036 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2036, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.test_hive_2037 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2037, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.test_hive_2038 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2038, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.test_hive_2039 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2039, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.test_hive_2040 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2040, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.test_hive_2041 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2041, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.test_hive_2042 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2042, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.test_hive_2043 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2043, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.test_hive_2044 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2044, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.test_hive_2045 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2045, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.test_hive_459 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_459, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.test_hive_460 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_460, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.test_hive_461 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_461, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.test_hive_462 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_462, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.test_hive_463 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_463, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.test_hive_464 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_464, type:string, comment:null), ] +POSTHOOK: Lineage: test_hive_2050.test_hive_465 SIMPLE 
+POSTHOOK: Lineage: test_hive_2050.test_hive_465 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_465, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2050.test_hive_466 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_466, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2050.test_hive_467 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_467, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2050.test_hive_468 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_468, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2050.test_hive_469 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_469, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2050.ts EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: drop view if exists test_hive_2047
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists test_hive_2047
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view test_hive_2047
+as
+select t1.*
+from test_hive_2050 t1
+inner join test_hive_2048 t2 on
+t1.ds_ts = t2.max_partition
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@test_hive_2048
+PREHOOK: Input: default@test_hive_2049
+PREHOOK: Input: default@test_hive_2050
+PREHOOK: Input: default@test_hive_2051
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_hive_2047
+POSTHOOK: query: create view test_hive_2047
+as
+select t1.*
+from test_hive_2050 t1
+inner join test_hive_2048 t2 on
+t1.ds_ts = t2.max_partition
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@test_hive_2048
+POSTHOOK: Input: default@test_hive_2049
+POSTHOOK: Input: default@test_hive_2050
+POSTHOOK: Input: default@test_hive_2051
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_hive_2047
+POSTHOOK: Lineage: test_hive_2047.creation_date EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:creation_date, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.ds EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:ds, type:int, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.ds_ts SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:ds_ts, type:bigint, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.source_file_name SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:source_file_name, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_2033 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2033, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_2034 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2034, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_2035 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2035, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_2036 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2036, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_2037 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2037, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_2038 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2038, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_2039 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2039, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_2040 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2040, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_2041 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2041, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_2042 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2042, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_2043 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2043, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_2044 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2044, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_2045 EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_2045, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_459 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_459, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_460 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_460, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_461 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_461, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_462 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_462, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_463 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_463, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_464 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_464, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_465 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_465, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_466 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_466, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_467 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_467, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_468 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_468, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.test_hive_469 SIMPLE [(test_hive_2049)test_hive_2049.FieldSchema(name:test_hive_469, type:string, comment:null), ]
+POSTHOOK: Lineage: test_hive_2047.ts EXPRESSION [(test_hive_2049)test_hive_2049.FieldSchema(name:ts, type:int, comment:null), ]
+PREHOOK: query: explain select
+t1.test_hive_1018,
+t1.test_hive_1004,
+t1.test_hive_1025,
+t2.test_hive_1560,
+t4.test_hive_1274,
+t1.test_hive_29,
+t7.test_hive_1948,
+t1.test_hive_97,
+t32.test_hive_1610,
+t1.test_hive_98,
+t34.test_hive_1972,
+t35.test_hive_1792,
+t41.test_hive_1224,
+t43.test_hive_1895,
+t44.test_hive_1907,
+t45.test_hive_1935,
+t46.test_hive_2010,
+t47.test_hive_2023,
+t1.test_hive_78,
+t15.test_hive_1260,
+t1.test_hive_79,
+t1.test_hive_24,
+t3.test_hive_1716,
+t42.test_hive_1224,
+t14.test_hive_1198,
+t23.test_hive_1459,
+t28.test_hive_1533,
+t26.test_hive_1503,
+t11.test_hive_1154,
+t21.test_hive_1429,
+t17.test_hive_1340,
+t18.test_hive_1356,
+t38.test_hive_1847,
+t39.test_hive_1859,
+t40.test_hive_1871,
+t12.test_hive_1168,
+t22.test_hive_1443,
+t13.test_hive_1182,
+t25.test_hive_1487,
+t24.test_hive_1473,
+t27.test_hive_1517,
+t8.test_hive_1110,
+t9.test_hive_1124,
+t10.test_hive_1138,
+t16.test_hive_1309,
+t36.test_hive_1806,
+t1.test_hive_104,
+t1.test_hive_1002,
+t1.test_hive_1003,
+t1.test_hive_25,
+t5.test_hive_1960,
+t29.test_hive_1547,
+t30.test_hive_1224,
+t31.test_hive_1224,
+t33.test_hive_1778,
+t37.test_hive_1834,
+t19.test_hive_1972,
+t20.test_hive_1972,
+t1.test_hive_100,
+t1.test_hive_1023,
+t1.test_hive_1024,
+t1.test_hive_1010,
+t1.test_hive_1010_a_d,
+t1.test_hive_1010_a_g,
+t1.test_hive_1026,
+t1.test_hive_1000,
+t1.test_hive_1001,
+t1.test_hive_1030,
+t1.test_hive_1030_1,
+t1.test_hive_1030_2,
+t1.test_hive_1030_3,
+t1.test_hive_1021,
+t1.test_hive_1020,
+t1.test_hive_1022,
+t1.test_hive_1019,
+t1.test_hive_1027,
+t1.test_hive_1028,
+t1.test_hive_1029,
+t1.test_hive_1005,
+t1.test_hive_1005_a_d,
+t1.test_hive_1005_psr,
+t1.test_hive_1005_psr_a_d,
+t1.test_hive_1005_psr_e,
+t1.test_hive_1013,
+t1.test_hive_1013_a_d,
+t1.test_hive_1013_psr,
+t1.test_hive_1013_psr_a_d,
+t1.test_hive_1013_psr_e,
+t1.test_hive_1034
+from test_hive_1036 t1
+join test_hive_1563 t2 on t1.test_hive_23 = t2.test_hive_422
+join test_hive_1721 t3 on t1.test_hive_26 = t3.test_hive_434
+join test_hive_1277 t4 on t1.test_hive_27 = t4.test_hive_308
+join test_hive_1963 t5 on t1.test_hive_28 = t5.test_hive_453
+join test_hive_1951 t7 on t1.test_hive_30 = t7.test_hive_452
+join test_hive_1115 t8 on t1.test_hive_71 = t8.test_hive_272
+join test_hive_1129 t9 on t1.test_hive_72 = t9.test_hive_273
+join test_hive_1143 t10 on t1.test_hive_73 = t10.test_hive_274
+join test_hive_1159 t11 on t1.test_hive_74 = t11.test_hive_275
+join test_hive_1173 t12 on t1.test_hive_75 = t12.test_hive_276
+join test_hive_1187 t13 on t1.test_hive_76 = t13.test_hive_277
+join test_hive_1203 t14 on t1.test_hive_77 = t14.test_hive_278
+join test_hive_1265 t15 on t1.test_hive_78 = t15.test_hive_306
+join test_hive_1313 t16 on t1.test_hive_80 = t16.test_hive_334
+join test_hive_1345 t17 on t1.test_hive_81 = t17.test_hive_336
+join test_hive_1361 t18 on t1.test_hive_82 = t18.test_hive_337
+join test_hive_1977 t19 on t1.test_hive_83 = t19.test_hive_454
+join test_hive_1977 t20 on t1.test_hive_84 = t20.test_hive_454
+join test_hive_1434 t21 on t1.test_hive_85 = t21.test_hive_413
+join test_hive_1448 t22 on t1.test_hive_86 = t22.test_hive_414
+join test_hive_1464 t23 on t1.test_hive_87 = t23.test_hive_415
+join test_hive_1478 t24 on t1.test_hive_88 = t24.test_hive_416
+join test_hive_1492 t25 on t1.test_hive_89 = t25.test_hive_417
+join test_hive_1508 t26 on t1.test_hive_90 = t26.test_hive_418
+join test_hive_1522 t27 on t1.test_hive_91 = t27.test_hive_419
+join test_hive_1538 t28 on t1.test_hive_92 = t28.test_hive_420
+join test_hive_1551 t29 on t1.test_hive_93 = t29.test_hive_421
+join test_hive_1227 t30 on t1.test_hive_94 = t30.test_hive_280
+join test_hive_1227 t31 on t1.test_hive_95 = t31.test_hive_280
+join test_hive_1615 t32 on t1.test_hive_96 = t32.test_hive_426
+join test_hive_1783 t33 on t1.test_hive_99 = t33.test_hive_440
+join test_hive_1977 t34 on t1.test_hive_101 = t34.test_hive_454
+join test_hive_1797 t35 on t1.test_hive_102 = t35.test_hive_441
+join test_hive_1811 t36 on t1.test_hive_103 = t36.test_hive_442
+join test_hive_1838 t37 on t1.test_hive_105 = t37.test_hive_444
+join test_hive_1850 t38 on t1.test_hive_106 = t38.test_hive_445
+join test_hive_1862 t39 on t1.test_hive_107 = t39.test_hive_446
+join test_hive_1874 t40 on t1.test_hive_108 = t40.test_hive_447
+join test_hive_1227 t41 on t1.test_hive_109 = t41.test_hive_280
+join test_hive_1227 t42 on t1.test_hive_110 = t42.test_hive_280
+join test_hive_1898 t43 on t1.test_hive_111 = t43.test_hive_449
+join test_hive_1911 t44 on t1.test_hive_112 = t44.test_hive_450
+join test_hive_1939 t45 on t1.test_hive_113 = t45.test_hive_451
+join test_hive_2014 t46 on t1.test_hive_114 = t46.test_hive_457
+join test_hive_2028 t47 on t1.test_hive_115 = t47.test_hive_458
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select
+t1.test_hive_1018,
+t1.test_hive_1004,
+t1.test_hive_1025,
+t2.test_hive_1560,
+t4.test_hive_1274,
+t1.test_hive_29,
+t7.test_hive_1948,
+t1.test_hive_97,
+t32.test_hive_1610,
+t1.test_hive_98,
+t34.test_hive_1972,
+t35.test_hive_1792,
+t41.test_hive_1224,
+t43.test_hive_1895,
+t44.test_hive_1907,
+t45.test_hive_1935,
+t46.test_hive_2010,
+t47.test_hive_2023,
+t1.test_hive_78,
+t15.test_hive_1260,
+t1.test_hive_79,
+t1.test_hive_24,
+t3.test_hive_1716,
+t42.test_hive_1224,
+t14.test_hive_1198,
+t23.test_hive_1459,
+t28.test_hive_1533,
+t26.test_hive_1503,
+t11.test_hive_1154,
+t21.test_hive_1429,
+t17.test_hive_1340,
+t18.test_hive_1356,
+t38.test_hive_1847,
+t39.test_hive_1859,
+t40.test_hive_1871,
+t12.test_hive_1168,
+t22.test_hive_1443,
+t13.test_hive_1182,
+t25.test_hive_1487,
+t24.test_hive_1473,
+t27.test_hive_1517,
+t8.test_hive_1110,
+t9.test_hive_1124,
+t10.test_hive_1138,
+t16.test_hive_1309,
+t36.test_hive_1806,
+t1.test_hive_104,
+t1.test_hive_1002,
+t1.test_hive_1003,
+t1.test_hive_25,
+t5.test_hive_1960,
+t29.test_hive_1547,
+t30.test_hive_1224,
+t31.test_hive_1224,
+t33.test_hive_1778,
+t37.test_hive_1834,
+t19.test_hive_1972,
+t20.test_hive_1972,
+t1.test_hive_100,
+t1.test_hive_1023,
+t1.test_hive_1024,
+t1.test_hive_1010,
+t1.test_hive_1010_a_d,
+t1.test_hive_1010_a_g,
+t1.test_hive_1026,
+t1.test_hive_1000,
+t1.test_hive_1001,
+t1.test_hive_1030,
+t1.test_hive_1030_1,
+t1.test_hive_1030_2,
+t1.test_hive_1030_3,
+t1.test_hive_1021,
+t1.test_hive_1020,
+t1.test_hive_1022,
+t1.test_hive_1019,
+t1.test_hive_1027,
+t1.test_hive_1028,
+t1.test_hive_1029,
+t1.test_hive_1005,
+t1.test_hive_1005_a_d,
+t1.test_hive_1005_psr,
+t1.test_hive_1005_psr_a_d,
+t1.test_hive_1005_psr_e,
+t1.test_hive_1013,
+t1.test_hive_1013_a_d,
+t1.test_hive_1013_psr,
+t1.test_hive_1013_psr_a_d,
+t1.test_hive_1013_psr_e,
+t1.test_hive_1034
+from test_hive_1036 t1
+join test_hive_1563 t2 on t1.test_hive_23 = t2.test_hive_422
+join test_hive_1721 t3 on t1.test_hive_26 = t3.test_hive_434
+join test_hive_1277 t4 on t1.test_hive_27 = t4.test_hive_308
+join test_hive_1963 t5 on t1.test_hive_28 = t5.test_hive_453
+join test_hive_1951 t7 on t1.test_hive_30 = t7.test_hive_452
+join test_hive_1115 t8 on t1.test_hive_71 = t8.test_hive_272
+join test_hive_1129 t9 on t1.test_hive_72 = t9.test_hive_273
+join test_hive_1143 t10 on t1.test_hive_73 = t10.test_hive_274
+join test_hive_1159 t11 on t1.test_hive_74 = t11.test_hive_275
+join test_hive_1173 t12 on t1.test_hive_75 = t12.test_hive_276
+join test_hive_1187 t13 on t1.test_hive_76 = t13.test_hive_277
+join test_hive_1203 t14 on t1.test_hive_77 = t14.test_hive_278
+join test_hive_1265 t15 on t1.test_hive_78 = t15.test_hive_306
+join test_hive_1313 t16 on t1.test_hive_80 = t16.test_hive_334
+join test_hive_1345 t17 on t1.test_hive_81 = t17.test_hive_336
+join test_hive_1361 t18 on t1.test_hive_82 = t18.test_hive_337
+join test_hive_1977 t19 on t1.test_hive_83 = t19.test_hive_454
+join test_hive_1977 t20 on t1.test_hive_84 = t20.test_hive_454
+join test_hive_1434 t21 on t1.test_hive_85 = t21.test_hive_413
+join test_hive_1448 t22 on t1.test_hive_86 = t22.test_hive_414
+join test_hive_1464 t23 on t1.test_hive_87 = t23.test_hive_415
+join test_hive_1478 t24 on t1.test_hive_88 = t24.test_hive_416
+join test_hive_1492 t25 on t1.test_hive_89 = t25.test_hive_417
+join test_hive_1508 t26 on t1.test_hive_90 = t26.test_hive_418
+join test_hive_1522 t27 on t1.test_hive_91 = t27.test_hive_419
+join test_hive_1538 t28 on t1.test_hive_92 = t28.test_hive_420
+join test_hive_1551 t29 on t1.test_hive_93 = t29.test_hive_421
+join test_hive_1227 t30 on t1.test_hive_94 = t30.test_hive_280
+join test_hive_1227 t31 on t1.test_hive_95 = t31.test_hive_280
+join test_hive_1615 t32 on t1.test_hive_96 = t32.test_hive_426
+join test_hive_1783 t33 on t1.test_hive_99 = t33.test_hive_440
+join test_hive_1977 t34 on t1.test_hive_101 = t34.test_hive_454
+join test_hive_1797 t35 on t1.test_hive_102 = t35.test_hive_441
+join test_hive_1811 t36 on t1.test_hive_103 = t36.test_hive_442
+join test_hive_1838 t37 on t1.test_hive_105 = t37.test_hive_444
+join test_hive_1850 t38 on t1.test_hive_106 = t38.test_hive_445
+join test_hive_1862 t39 on t1.test_hive_107 = t39.test_hive_446
+join test_hive_1874 t40 on t1.test_hive_108 = t40.test_hive_447
+join test_hive_1227 t41 on t1.test_hive_109 = t41.test_hive_280
+join test_hive_1227 t42 on t1.test_hive_110 = t42.test_hive_280
+join test_hive_1898 t43 on t1.test_hive_111 = t43.test_hive_449
+join test_hive_1911 t44 on t1.test_hive_112 = t44.test_hive_450
+join test_hive_1939 t45 on t1.test_hive_113 = t45.test_hive_451
+join test_hive_2014 t46 on t1.test_hive_114 = t46.test_hive_457
+join test_hive_2028 t47 on t1.test_hive_115 = t47.test_hive_458
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 is a root stage
+  Stage-3 is a root stage
+  Stage-4 is a root stage
+  Stage-5 is a root stage
+  Stage-6 is a root stage
+  Stage-7 is a root stage
+  Stage-8 is a root stage
+  Stage-9 is a root stage
+  Stage-10 is a root stage
+  Stage-11 is a root stage
+  Stage-12 is a root stage
+  Stage-13 is a root stage
+  Stage-14 is a root stage
+  Stage-15 is a root stage
+  Stage-16 is a root stage
+  Stage-17 is a root stage
+  Stage-18 is a root stage
+  Stage-19 is a root stage
+  Stage-20 is a root stage
+  Stage-21 is a root stage
+  Stage-22 is a root stage
+  Stage-23 is a root stage
+  Stage-24 is a root stage
+  Stage-25 is a root stage
+  Stage-26 is a root stage
+  Stage-27 is a root stage
+  Stage-28 is a root stage
+  Stage-29 is a root stage
+  Stage-30 is a root stage
+  Stage-31 is a root stage
+  Stage-32 is a root stage
+  Stage-33 is a root stage
+  Stage-34 is a root stage
+  Stage-35 is a root stage
+  Stage-36 is a root stage
+  Stage-37 is a root stage
+  Stage-38 is a root stage
+  Stage-39 is a root stage
+  Stage-40 is a root stage
+  Stage-41 is a root stage
+  Stage-42 is a root stage
+  Stage-43 is a root stage
+  Stage-44 is a root stage
+  Stage-45 is a root stage
+  Stage-46 is a root stage
+  Stage-47 depends on stages: Stage-1
+  Stage-48 depends on stages: Stage-2
+  Stage-49 depends on stages: Stage-3
+  Stage-50 depends on stages: Stage-4
+  Stage-51 depends on stages: Stage-5
+  Stage-52 depends on stages: Stage-6
+  Stage-53 depends on stages: Stage-7
+  Stage-54 depends on stages: Stage-8
+  Stage-55 depends on stages: Stage-9
+  Stage-56 depends on stages: Stage-10
+  Stage-57 depends on stages: Stage-11
+  Stage-58 depends on stages: Stage-12
+  Stage-59 depends on stages: Stage-13
+  Stage-60 depends on stages: Stage-14
+  Stage-61 depends on stages: Stage-15
+  Stage-62 depends on stages: Stage-16
+  Stage-63 depends on stages: Stage-17
+  Stage-64 depends on stages: Stage-18
+  Stage-65 depends on stages: Stage-19
+  Stage-66 depends on stages: Stage-20
+  Stage-67 depends on stages: Stage-21
+  Stage-68 depends on stages: Stage-22
+  Stage-69 depends on stages: Stage-23
+  Stage-70 depends on stages: Stage-24
+  Stage-71 depends on stages: Stage-25
+  Stage-72 depends on stages: Stage-26
+  Stage-73 depends on stages: Stage-27
+  Stage-74 depends on stages: Stage-28
+  Stage-75 depends on stages: Stage-29
+  Stage-76 depends on stages: Stage-30
+  Stage-77 depends on stages: Stage-31
+  Stage-78 depends on stages: Stage-32
+  Stage-79 depends on stages: Stage-33
+  Stage-80 depends on stages: Stage-34
+  Stage-81 depends on stages: Stage-35
+  Stage-82 depends on stages: Stage-36
+  Stage-83 depends on stages: Stage-37
+  Stage-84 depends on stages: Stage-38
+  Stage-85 depends on stages: Stage-39
+  Stage-86 depends on stages: Stage-40
+  Stage-87 depends on stages: Stage-41
+  Stage-88 depends on stages: Stage-42
+  Stage-89 depends on stages: Stage-43
+  Stage-90 depends on stages: Stage-44
+  Stage-91 depends on stages: Stage-45
+  Stage-92 depends on stages: Stage-46
+  Stage-93 depends on stages: Stage-47, Stage-48 , consists of Stage-407, Stage-408, Stage-2
+  Stage-94 depends on stages: Stage-138, Stage-139, Stage-46
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t1:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t1:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col90 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-2
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t2:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t2:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col9 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-3
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t3:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t3:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col11 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t4:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t4:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col9 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-5
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t5:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t5:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col9 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-6
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t7:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t7:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col9 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-7
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t8:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t8:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col11 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-8
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t9:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t9:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col11 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-9
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t10:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t10:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col11 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-10
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t11:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t11:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col13 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-11
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t12:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t12:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col11 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-12
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t13:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t13:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col11 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-13
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t14:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t14:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col13 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-14
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t15:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t15:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col12 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-15
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t16:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t16:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col10 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-16
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t17:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t17:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col13 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-17
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t18:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t18:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col13 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-18
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t19:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t19:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col11 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-19
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t20:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t20:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col11 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-20
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t21:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t21:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col13 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-21
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t22:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t22:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col11 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-22
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t23:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t23:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col13 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-23
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t24:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t24:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col11 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-24
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t25:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t25:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col11 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-25
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t26:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t26:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col13 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-26
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t27:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t27:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col11 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-27
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t28:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t28:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col13 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-28
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t29:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t29:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col10 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-29
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t30:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t30:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col9 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-30
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t31:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t31:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col9 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-31
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t32:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t32:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col11 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-32
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t33:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t33:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col11 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-33
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t34:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t34:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col11 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-34
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t35:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t35:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col11 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-35
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t36:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t36:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col11 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-36
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t37:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t37:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col10 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-37
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t38:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t38:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col9 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-38
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t39:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t39:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col9 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-39
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t40:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t40:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col9 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-40
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t41:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t41:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col9 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-41
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t42:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t42:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col9 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-42
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t43:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t43:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col9 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-43
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t44:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t44:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col10 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-44
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t45:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t45:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col10 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-45
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t46:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t46:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col10 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-46
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        t47:t2
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        t47:t2
+          TableScan
+            alias: t2
+            filterExpr: max_partition is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: max_partition is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              HashTable Sink Operator
+                keys:
+                  0 _col11 (type: bigint)
+                  1 max_partition (type: bigint)
+
+  Stage: Stage-47
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test_hive_1038
+            filterExpr: (ds_ts is not null and test_hive_23 is not null and test_hive_26 is not null and test_hive_27 is not null and test_hive_28 is not null and test_hive_30 is not null and test_hive_71 is not null and test_hive_72 is not null and test_hive_73 is not null and UDFToDouble(test_hive_74) is not null and test_hive_75 is not null and test_hive_76 is not null and UDFToDouble(test_hive_77) is not null and test_hive_78 is not null and test_hive_80 is not null and test_hive_81 is not null and test_hive_82 is not null and test_hive_83 is not null and test_hive_84 is not null and UDFToDouble(test_hive_85) is not null and test_hive_86 is not null and UDFToDouble(test_hive_87) is not null and test_hive_88 is not null and test_hive_89 is not null and UDFToDouble(test_hive_90) is not null and test_hive_91 is not null and UDFToDouble(test_hive_92) is not null and test_hive_93 is not null and test_hive_94 is not null and test_hive_95 is not null and test_hive_96 is not null and test_hive_99 is not null and test_hive_101 is not null and test_hive_102 is not null and test_hive_103 is not null and test_hive_105 is not null and UDFToDouble(test_hive_106) is not null and test_hive_107 is not null and test_hive_108 is not null and test_hive_109 is not null and test_hive_110 is not null and test_hive_111 is not null and test_hive_112 is not null and test_hive_113 is not null and test_hive_114 is not null and test_hive_115 is not null) (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: (UDFToDouble(test_hive_106) is not null and UDFToDouble(test_hive_74) is not null and UDFToDouble(test_hive_77) is not null and UDFToDouble(test_hive_85) is not null and UDFToDouble(test_hive_87) is not null and UDFToDouble(test_hive_90) is not null and UDFToDouble(test_hive_92) is not null and ds_ts is not null and test_hive_101 is not null and test_hive_102 is not null and test_hive_103 is not null and test_hive_105 is not null and test_hive_107 is not null and test_hive_108 is not null and test_hive_109 is not null and test_hive_110 is not null and test_hive_111 is not null and test_hive_112 is not null and test_hive_113 is not null and test_hive_114 is not null and test_hive_115 is not null and test_hive_23 is not null and test_hive_26 is not null and test_hive_27 is not null and test_hive_28 is not null and test_hive_30 is not null and test_hive_71 is not null and test_hive_72 is not null and test_hive_73 is not null and test_hive_75 is not null and test_hive_76 is not null and test_hive_78 is not null and test_hive_80 is not null and test_hive_81 is not null and test_hive_82 is not null and test_hive_83 is not null and test_hive_84 is not null and test_hive_86 is not null and test_hive_88 is not null and test_hive_89 is not null and test_hive_91 is not null and test_hive_93 is not null and test_hive_94 is not null and test_hive_95 is not null and test_hive_96 is not null and test_hive_99 is not null) (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: UDFToInteger(test_hive_1018) (type: int), UDFToInteger(test_hive_1004) (type: int), UDFToInteger(test_hive_1025) (type: int), test_hive_23 (type: string), test_hive_27 (type: string), test_hive_29 (type: string), test_hive_30 (type: string), test_hive_97 (type: string), test_hive_96 (type: string), test_hive_98 (type: string), test_hive_101 (type: string), test_hive_102 (type: string), test_hive_109 (type: string), test_hive_111 (type: string), test_hive_112 (type: string), test_hive_113 (type: string), test_hive_114 (type: string), test_hive_115 (type: string), test_hive_78 (type: string), test_hive_79 (type: string), test_hive_24 (type: string), test_hive_26 (type: string), test_hive_110 (type: string), test_hive_77 (type: string), test_hive_87 (type: string), test_hive_92 (type: string), test_hive_90 (type: string), test_hive_74 (type: string), test_hive_85 (type: string), test_hive_81 (type: string), test_hive_82 (type: string), test_hive_106 (type: string), test_hive_107 (type: string), test_hive_108 (type: string), test_hive_75 (type: string), test_hive_86 (type: string), test_hive_76 (type: string), test_hive_89 (type: string), test_hive_88 (type: string), test_hive_91 (type: string), test_hive_71 (type: string), test_hive_72 (type: string), test_hive_73 (type: string), test_hive_80 (type: string), test_hive_103 (type: string), test_hive_104 (type: string), test_hive_1002 (type: string), test_hive_1003 (type: string), CAST( from_unixtime(to_unix_timestamp(test_hive_25,'yyyymmdd'), 'yyyy-mm-dd') AS TIMESTAMP) (type: timestamp), test_hive_28 (type: string), test_hive_93 (type: string), test_hive_94 (type: string), test_hive_95 (type: string), test_hive_99 (type: string), test_hive_105 (type: string), test_hive_83 (type: string), test_hive_84 (type: string), test_hive_100 (type: string), UDFToInteger(test_hive_1023) (type: int), UDFToInteger(test_hive_1024) (type: int), UDFToInteger(test_hive_1010) (type: int), UDFToInteger(test_hive_1010_a_d) (type: int), UDFToInteger(test_hive_1010_a_g) (type: int), UDFToDouble(test_hive_1026) (type: double), UDFToDouble(test_hive_1000) (type: double), UDFToDouble(test_hive_1001) (type: double), UDFToInteger(test_hive_1030) (type: int), UDFToInteger(test_hive_1030_1) (type: int), UDFToInteger(test_hive_1030_2) (type: int), UDFToInteger(test_hive_1030_3) (type: int), UDFToDouble(test_hive_1021) (type: double), UDFToDouble(test_hive_1020) (type: double), UDFToInteger(test_hive_1022) (type: int), UDFToInteger(test_hive_1019) (type: int), UDFToDouble(test_hive_1027) (type: double), UDFToDouble(test_hive_1028) (type: double), UDFToDouble(test_hive_1029) (type: double), UDFToInteger(test_hive_1005) (type: int), UDFToInteger(test_hive_1005_a_d) (type: int), UDFToInteger(test_hive_1005_psr) (type: int), UDFToInteger(test_hive_1005_psr_a_d) (type: int), UDFToInteger(test_hive_1005_psr_e) (type: int), UDFToInteger(test_hive_1013) (type: int), UDFToInteger(test_hive_1013_a_d) (type: int), UDFToInteger(test_hive_1013_psr) (type: int), UDFToInteger(test_hive_1013_psr_a_d) (type: int), UDFToInteger(test_hive_1013_psr_e) (type: int), CAST( from_unixtime(to_unix_timestamp(test_hive_1034,'yyyymmddhhmmss'), 'yyyy-mm-dd hh:mm:ss') AS TIMESTAMP) (type: timestamp), ds_ts (type: bigint)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33, _col34, _col35, _col36, _col37, _col38, _col39, _col40, _col41, _col42, _col43, _col44, _col45, _col46, _col47, _col48, _col49, _col50, _col51, _col52, _col53, _col54, _col55, _col56, _col57, _col58, _col59, _col60, _col61, _col62, _col63, _col64, _col65, _col66, _col67, _col68, _col69, _col70, _col71, _col72, _col73, _col74, _col75, _col76, _col77, _col78, _col79, _col80, _col81, _col82, _col83, _col84, _col85, _col86, _col87, _col90
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Map Join Operator
+                  condition map:
+                       Inner Join 0 to 1
+                  keys:
+                    0 _col90 (type: bigint)
+                    1 max_partition (type: bigint)
+                  outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33, _col34, _col35, _col36, _col37, _col38, _col39, _col40, _col41, _col42, _col43, _col44, _col45, _col46, _col47, _col48, _col49, _col50, _col51, _col52, _col53, _col54, _col55, _col56, _col57, _col58, _col59, _col60, _col61, _col62, _col63, _col64, _col65, _col66, _col67, _col68, _col69, _col70, _col71, _col72, _col73, _col74, _col75, _col76, _col77, _col78, _col79, _col80, _col81, _col82, _col83, _col84, _col85, _col86, _col87
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+      Execution mode: vectorized
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-48
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test_hive_1565
+            filterExpr: (ds_ts is not null and test_hive_422 is not null) (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: (ds_ts is not null and test_hive_422 is not null) (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: test_hive_422 (type: string), test_hive_1560 (type: string), ds_ts (type: bigint)
+                outputColumnNames: _col3, _col5, _col9
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Map Join Operator
+                  condition map:
+                       Inner Join 0 to 1
+                  keys:
+                    0 _col9 (type: bigint)
+                    1 max_partition (type: bigint)
+                  outputColumnNames: _col3, _col5
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+      Execution mode: vectorized
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-49
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test_hive_1723
+            filterExpr: (ds_ts is not null and test_hive_434 is not null) (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: (ds_ts is not null and test_hive_434 is not null) (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: test_hive_434 (type: string), test_hive_1716 (type: string), ds_ts (type: bigint)
+                outputColumnNames: _col3, _col7, _col11
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Map Join Operator
+                  condition map:
+                       Inner Join 0 to 1
+                  keys:
+                    0 _col11 (type: bigint)
+                    1 max_partition (type: bigint)
+                  outputColumnNames: _col3, _col7
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+      Execution mode: vectorized
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-50
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test_hive_1279
+            filterExpr: (ds_ts is not null and test_hive_308 is not null) (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: (ds_ts is not null and test_hive_308 is not null) (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: test_hive_308 (type: string), test_hive_1274 (type: string), ds_ts (type: bigint)
+                outputColumnNames: _col3, _col5, _col9
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Map Join Operator
+                  condition map:
+                       Inner Join 0 to 1
+                  keys:
+                    0 _col9 (type: bigint)
+                    1 max_partition (type: bigint)
+                  outputColumnNames: _col3, _col5
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+      Execution mode: vectorized
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-51
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test_hive_1965
+            filterExpr: (ds_ts is not null and test_hive_453 is not null) (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: (ds_ts is not null and test_hive_453 is not null) (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: test_hive_453 (type: string), test_hive_1960 (type: string), ds_ts (type: bigint)
+                outputColumnNames: _col3, _col5, _col9
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Map Join Operator
+                  condition map:
+                       Inner Join 0 to 1
+                  keys:
+                    0 _col9 (type: bigint)
+                    1 max_partition (type: bigint)
+                  outputColumnNames: _col3, _col5
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+      Execution mode: vectorized
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-52
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test_hive_1953
+            filterExpr: (ds_ts is not null and test_hive_452 is not null) (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: (ds_ts is not null and test_hive_452 is not null) (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: test_hive_452 (type: string), test_hive_1948 (type: string), ds_ts (type: bigint)
+                outputColumnNames: _col3, _col5, _col9
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Map Join Operator
+                  condition map:
+                       Inner Join 0 to 1
+                  keys:
+                    0 _col9 (type: bigint)
+                    1 max_partition (type: bigint)
+                  outputColumnNames: _col3, _col5
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+      Execution mode: vectorized
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-53
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test_hive_1117
+            filterExpr: (ds_ts is not null and test_hive_272 is not null) (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: (ds_ts is not null and test_hive_272 is not null) (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: test_hive_272 (type: string), test_hive_1110 (type: string), ds_ts (type: bigint)
+                outputColumnNames: _col3, _col7, _col11
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Map Join Operator
+                  condition map:
+                       Inner Join 0 to 1
+                  keys:
+                    0 _col11 (type: bigint)
+                    1 max_partition (type: bigint)
+                  outputColumnNames: _col3, _col7
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                  File Output Operator
+ compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-54 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1131 + filterExpr: (ds_ts is not null and test_hive_273 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_273 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_273 (type: string), test_hive_1124 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col7, _col11 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col11 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col7 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-55 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1145 + filterExpr: (ds_ts is not null and test_hive_274 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_274 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_274 (type: string), test_hive_1138 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col7, _col11 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col11 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col7 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-56 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1161 + filterExpr: (ds_ts is not null and UDFToDouble(CAST( test_hive_275 AS decimal(10,0))) is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (UDFToDouble(CAST( test_hive_275 AS decimal(10,0))) is not null and ds_ts is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: CAST( test_hive_275 AS decimal(10,0)) (type: decimal(10,0)), test_hive_1154 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col9, _col13 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + 
Inner Join 0 to 1 + keys: + 0 _col13 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col9 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-57 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1175 + filterExpr: (ds_ts is not null and test_hive_276 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_276 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_276 (type: string), test_hive_1168 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col7, _col11 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col11 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col7 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-58 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1189 + filterExpr: (ds_ts is not null and test_hive_277 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_277 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_277 (type: string), test_hive_1182 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col7, _col11 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col11 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col7 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-59 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1205 + filterExpr: (ds_ts is not null and UDFToDouble(CAST( test_hive_278 AS decimal(10,0))) is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (UDFToDouble(CAST( test_hive_278 AS decimal(10,0))) is not null and ds_ts is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: CAST( test_hive_278 AS decimal(10,0)) (type: 
decimal(10,0)), test_hive_1198 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col9, _col13 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col13 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col9 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-60 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1267 + filterExpr: (ds_ts is not null and test_hive_306 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_306 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_306 (type: string), test_hive_1260 (type: string), ds_ts (type: bigint) + outputColumnNames: _col4, _col8, _col12 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col12 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col4, _col8 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-61 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1315 + filterExpr: (ds_ts is not null and test_hive_334 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_334 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_334 (type: string), test_hive_1309 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col6, _col10 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col10 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col6 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-62 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1347 + filterExpr: (ds_ts is not null and test_hive_336 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_336 is not null) (type: boolean) + 
Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_336 (type: string), test_hive_1340 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col9, _col13 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col13 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col9 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-63 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1363 + filterExpr: (ds_ts is not null and test_hive_337 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_337 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_337 (type: string), test_hive_1356 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col9, _col13 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col13 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col9 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-64 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1979 + filterExpr: (ds_ts is not null and test_hive_454 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_454 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_454 (type: string), test_hive_1972 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col7, _col11 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col11 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col7 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-65 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1979 + filterExpr: (ds_ts is not null and test_hive_454 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_454 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_454 (type: string), test_hive_1972 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col7, _col11 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col11 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col7 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-66 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1436 + filterExpr: (ds_ts is not null and UDFToDouble(CAST( test_hive_413 AS decimal(10,0))) is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (UDFToDouble(CAST( test_hive_413 AS decimal(10,0))) is not null and ds_ts is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: CAST( test_hive_413 AS decimal(10,0)) (type: decimal(10,0)), test_hive_1429 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col9, _col13 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col13 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col9 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-67 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1450 + filterExpr: (ds_ts is not null and test_hive_414 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_414 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_414 (type: string), test_hive_1443 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col7, _col11 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col11 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col7 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: 
Stage-68 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1466 + filterExpr: (ds_ts is not null and UDFToDouble(CAST( test_hive_415 AS decimal(10,0))) is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (UDFToDouble(CAST( test_hive_415 AS decimal(10,0))) is not null and ds_ts is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: CAST( test_hive_415 AS decimal(10,0)) (type: decimal(10,0)), test_hive_1459 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col9, _col13 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col13 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col9 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-69 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1480 + filterExpr: (ds_ts is not null and test_hive_416 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_416 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_416 (type: string), test_hive_1473 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col7, _col11 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col11 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col7 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-70 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1494 + filterExpr: (ds_ts is not null and test_hive_417 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_417 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_417 (type: string), test_hive_1487 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col7, _col11 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col11 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col7 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat 
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-71 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1510 + filterExpr: (ds_ts is not null and UDFToDouble(CAST( test_hive_418 AS decimal(10,0))) is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (UDFToDouble(CAST( test_hive_418 AS decimal(10,0))) is not null and ds_ts is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: CAST( test_hive_418 AS decimal(10,0)) (type: decimal(10,0)), test_hive_1503 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col9, _col13 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col13 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col9 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-72 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1524 + filterExpr: (ds_ts is not null and test_hive_419 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_419 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_419 (type: string), test_hive_1517 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col7, _col11 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col11 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col7 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-73 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1540 + filterExpr: (ds_ts is not null and UDFToDouble(CAST( test_hive_420 AS decimal(10,0))) is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (UDFToDouble(CAST( test_hive_420 AS decimal(10,0))) is not null and ds_ts is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: CAST( test_hive_420 AS decimal(10,0)) (type: decimal(10,0)), test_hive_1533 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col9, _col13 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + 
condition map: + Inner Join 0 to 1 + keys: + 0 _col13 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col9 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-74 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1553 + filterExpr: (ds_ts is not null and test_hive_421 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_421 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_421 (type: string), test_hive_1547 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col6, _col10 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col10 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col6 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-75 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1229 + filterExpr: (ds_ts is not null and test_hive_280 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_280 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_280 (type: string), test_hive_1224 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col5, _col9 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col9 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col5 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-76 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1229 + filterExpr: (ds_ts is not null and test_hive_280 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_280 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_280 (type: string), test_hive_1224 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, 
_col5, _col9 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col9 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col5 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-77 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1617 + filterExpr: (ds_ts is not null and test_hive_426 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_426 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_426 (type: string), test_hive_1610 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col7, _col11 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col11 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col7 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-78 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1785 + filterExpr: (ds_ts is not null and test_hive_440 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_440 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_440 (type: string), test_hive_1778 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col7, _col11 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col11 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col7 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-79 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1979 + filterExpr: (ds_ts is not null and test_hive_454 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_454 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + 
expressions: test_hive_454 (type: string), test_hive_1972 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col7, _col11 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col11 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col7 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-80 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1799 + filterExpr: (ds_ts is not null and test_hive_441 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_441 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_441 (type: string), test_hive_1792 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col7, _col11 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col11 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col7 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-81 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1813 + filterExpr: (ds_ts is not null and test_hive_442 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_442 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_442 (type: string), test_hive_1806 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col7, _col11 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col11 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col7 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-82 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1840 + filterExpr: (ds_ts is not null and test_hive_444 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_444 is not null) 
(type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_444 (type: string), test_hive_1834 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col6, _col10 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col10 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col6 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-83 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1852 + filterExpr: (ds_ts is not null and UDFToDouble(CAST( test_hive_445 AS decimal(10,0))) is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (UDFToDouble(CAST( test_hive_445 AS decimal(10,0))) is not null and ds_ts is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: CAST( test_hive_445 AS decimal(10,0)) (type: decimal(10,0)), test_hive_1847 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col5, _col9 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col9 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col5 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-84 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1864 + filterExpr: (ds_ts is not null and test_hive_446 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_446 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_446 (type: string), test_hive_1859 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col5, _col9 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col9 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col5 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-85 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1876 + filterExpr: (ds_ts is 
not null and test_hive_447 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_447 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_447 (type: string), test_hive_1871 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col5, _col9 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col9 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col5 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-86 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1229 + filterExpr: (ds_ts is not null and test_hive_280 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_280 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_280 (type: string), test_hive_1224 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col5, _col9 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col9 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col5 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + Stage: Stage-87 + Map Reduce + Map Operator Tree: + TableScan + alias: test_hive_1229 + filterExpr: (ds_ts is not null and test_hive_280 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Filter Operator + predicate: (ds_ts is not null and test_hive_280 is not null) (type: boolean) + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: test_hive_280 (type: string), test_hive_1224 (type: string), ds_ts (type: bigint) + outputColumnNames: _col3, _col5, _col9 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 _col9 (type: bigint) + 1 max_partition (type: bigint) + outputColumnNames: _col3, _col5 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Execution mode: vectorized + Local Work: + Map Reduce Local Work + + 
+  Stage: Stage-88
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test_hive_1900
+            filterExpr: (ds_ts is not null and test_hive_449 is not null) (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: (ds_ts is not null and test_hive_449 is not null) (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: test_hive_449 (type: string), test_hive_1895 (type: string), ds_ts (type: bigint)
+                outputColumnNames: _col3, _col5, _col9
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Map Join Operator
+                  condition map:
+                       Inner Join 0 to 1
+                  keys:
+                    0 _col9 (type: bigint)
+                    1 max_partition (type: bigint)
+                  outputColumnNames: _col3, _col5
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+      Execution mode: vectorized
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-89
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test_hive_1913
+            filterExpr: (ds_ts is not null and test_hive_450 is not null) (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: (ds_ts is not null and test_hive_450 is not null) (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: test_hive_450 (type: string), test_hive_1907 (type: string), ds_ts (type: bigint)
+                outputColumnNames: _col3, _col6, _col10
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Map Join Operator
+                  condition map:
+                       Inner Join 0 to 1
+                  keys:
+                    0 _col10 (type: bigint)
+                    1 max_partition (type: bigint)
+                  outputColumnNames: _col3, _col6
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+      Execution mode: vectorized
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-90
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test_hive_1941
+            filterExpr: (ds_ts is not null and test_hive_451 is not null) (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: (ds_ts is not null and test_hive_451 is not null) (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: test_hive_451 (type: string), test_hive_1935 (type: string), ds_ts (type: bigint)
+                outputColumnNames: _col3, _col6, _col10
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Map Join Operator
+                  condition map:
+                       Inner Join 0 to 1
+                  keys:
+                    0 _col10 (type: bigint)
+                    1 max_partition (type: bigint)
+                  outputColumnNames: _col3, _col6
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+      Execution mode: vectorized
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-91
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test_hive_2016
+            filterExpr: (ds_ts is not null and test_hive_457 is not null) (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: (ds_ts is not null and test_hive_457 is not null) (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: test_hive_457 (type: string), test_hive_2010 (type: string), ds_ts (type: bigint)
+                outputColumnNames: _col3, _col6, _col10
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Map Join Operator
+                  condition map:
+                       Inner Join 0 to 1
+                  keys:
+                    0 _col10 (type: bigint)
+                    1 max_partition (type: bigint)
+                  outputColumnNames: _col3, _col6
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+      Execution mode: vectorized
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-92
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: test_hive_2030
+            filterExpr: (ds_ts is not null and test_hive_458 is not null) (type: boolean)
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Filter Operator
+              predicate: (ds_ts is not null and test_hive_458 is not null) (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: test_hive_458 (type: string), test_hive_2023 (type: string), ds_ts (type: bigint)
+                outputColumnNames: _col3, _col7, _col11
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Map Join Operator
+                  condition map:
+                       Inner Join 0 to 1
+                  keys:
+                    0 _col11 (type: bigint)
+                    1 max_partition (type: bigint)
+                  outputColumnNames: _col3, _col7
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+      Execution mode: vectorized
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-93
+    Conditional Operator
+
+  Stage: Stage-94
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
diff --git ql/src/test/results/clientpositive/mapjoin2.q.out ql/src/test/results/clientpositive/mapjoin2.q.out
index 6b85e13097..2288b4b29e 100644
--- ql/src/test/results/clientpositive/mapjoin2.q.out
+++ ql/src/test/results/clientpositive/mapjoin2.q.out
@@ -26,6 +26,85 @@ POSTHOOK: Input: _dummy_database@_dummy_table
POSTHOOK: Output: default@tbl_n1
POSTHOOK: Lineage: tbl_n1.n SCRIPT []
POSTHOOK: Lineage: tbl_n1.t SCRIPT []
+Warning: Map Join MAPJOIN[13][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: explain
+select a.n, a.t, isnull(b.n), isnull(b.t) from (select * from tbl_n1 where n = 1) a left outer join (select * from tbl_n1 where 1 = 2) b on a.n = b.n
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select a.n, a.t, isnull(b.n), isnull(b.t) from (select * from tbl_n1 where n = 1) a left outer join (select * from tbl_n1 where 1 = 2) b on a.n = b.n
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_1:tbl_n1
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_1:tbl_n1
+          TableScan
+            alias: tbl_n1
+            Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: false (type: boolean)
+              Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: n (type: bigint), t (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE
+                HashTable Sink Operator
+                  keys:
+                    0
+                    1
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: tbl_n1
+            filterExpr: (n = 1L) (type: boolean)
+            Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (n = 1L) (type: boolean)
+              Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: t (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE
+                Map Join Operator
+                  condition map:
+                       Left Outer Join 0 to 1
+                  keys:
+                    0
+                    1
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: 1L (type: bigint), _col0 (type: string), _col1 is null (type: boolean), _col2 is null (type: boolean)
+                    outputColumnNames: _col0, _col1, _col2, _col3
+                    Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Execution mode: vectorized
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
Warning: Map Join MAPJOIN[13][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
PREHOOK: query: select a.n, a.t, isnull(b.n), isnull(b.t) from (select * from tbl_n1 where n = 1) a left outer join (select * from tbl_n1 where 1 = 2) b on a.n = b.n
PREHOOK: type: QUERY
@@ -36,6 +115,91 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@tbl_n1
#### A masked pattern was here ####
1 one true true
+Warning: Map Join MAPJOIN[13][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: explain
+select isnull(a.n), isnull(a.t), b.n, b.t from (select * from tbl_n1 where 2 = 1) a right outer join (select * from tbl_n1 where n = 2) b on a.n = b.n
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select isnull(a.n), isnull(a.t), b.n, b.t from (select * from tbl_n1 where 2 = 1) a right outer join (select * from tbl_n1 where n = 2) b on a.n = b.n
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-4 is a root stage
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-4
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_0:tbl_n1
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_0:tbl_n1
+          TableScan
+            alias: tbl_n1
+            Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: false (type: boolean)
+              Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: n (type: bigint), t (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE
+                HashTable Sink Operator
+                  filter predicates:
+                    0
+                    1 {true}
+                  keys:
+                    0
+                    1
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: tbl_n1
+            filterExpr: (n = 2L) (type: boolean)
+            Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (n = 2L) (type: boolean)
+              Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: t (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE
+                Map Join Operator
+                  condition map:
+                       Right Outer Join 0 to 1
+                  filter predicates:
+                    0
+                    1 {true}
+                  keys:
+                    0
+                    1
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col0 is null (type: boolean), _col1 is null (type: boolean), 2L (type: bigint), _col2 (type: string)
+                    outputColumnNames: _col0, _col1, _col2, _col3
+                    Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      Execution mode: vectorized
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
Warning: Map Join MAPJOIN[13][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
PREHOOK: query: select isnull(a.n), isnull(a.t), b.n, b.t from (select * from tbl_n1 where 2 = 1) a right outer join (select * from tbl_n1 where n = 2) b on a.n = b.n
PREHOOK: type: QUERY
@@ -46,6 +210,81 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@tbl_n1
#### A masked pattern was here ####
true true 2 two
+Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: explain
+select isnull(a.n), isnull(a.t), isnull(b.n), isnull(b.t) from (select * from tbl_n1 where n = 1) a full outer join (select * from tbl_n1 where n = 2) b on a.n = b.n
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select isnull(a.n), isnull(a.t), isnull(b.n), isnull(b.t) from (select * from tbl_n1 where n = 1) a full outer join (select * from tbl_n1 where n = 2) b on a.n = b.n
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: tbl_n1
+            filterExpr: (n = 1L) (type: boolean)
+            Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (n = 1L) (type: boolean)
+              Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: 1L (type: bigint), t (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order:
+                  Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: bigint), _col1 (type: string)
+          TableScan
+            alias: tbl_n1
+            filterExpr: (n = 2L) (type: boolean)
+            Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (n = 2L) (type: boolean)
+              Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: 2L (type: bigint), t (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order:
+                  Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: bigint), _col1 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Full Outer Join 0 to 1
+          filter predicates:
+            0
+            1 {false}
+          keys:
+            0
+            1
+          outputColumnNames: _col0, _col1, _col2, _col3
+          Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: _col0 is null (type: boolean), _col1 is null (type: boolean), _col2 is null (type: boolean), _col3 is null (type: boolean)
+            outputColumnNames: _col0, _col1, _col2, _col3
+            Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
from tbl_n1 where n = 2) b on a.n = b.n PREHOOK: type: QUERY @@ -57,6 +296,74 @@ POSTHOOK: Input: default@tbl_n1 #### A masked pattern was here #### false false true true true true false false +Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key +PREHOOK: type: QUERY +POSTHOOK: query: explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_0:_dummy_table + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_0:_dummy_table + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + keys: + 0 + 1 + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Inner Join 0 to 1 + keys: + 0 + 1 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 11 (type: int), 1 (type: int), 1 (type: int), 0 (type: int), 0 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key PREHOOK: type: QUERY @@ -67,6 +374,77 @@ POSTHOOK: type: QUERY POSTHOOK: Input: _dummy_database@_dummy_table #### A masked pattern was here #### 11 1 1 0 0 +Warning: Map Join MAPJOIN[9][bigTable=?] 
in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a left outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key +PREHOOK: type: QUERY +POSTHOOK: query: explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a left outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_1:_dummy_table + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_1:_dummy_table + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 1 (type: int), 0 (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + keys: + 0 + 1 + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Left Outer Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col1, _col2 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 11 (type: int), 1 (type: int), _col1 (type: int), 0 (type: int), _col2 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a left outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key PREHOOK: type: QUERY @@ -77,6 +455,83 @@ POSTHOOK: type: QUERY POSTHOOK: Input: _dummy_database@_dummy_table #### A masked pattern was here #### 11 1 1 0 0 +Warning: Map Join MAPJOIN[9][bigTable=?] 
in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key +PREHOOK: type: QUERY +POSTHOOK: query: explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_0:_dummy_table + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_0:_dummy_table + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 11 (type: int), 1 (type: int), 0 (type: int) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + filter predicates: + 0 + 1 {true} + keys: + 0 + 1 + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Right Outer Join 0 to 1 + filter predicates: + 0 + 1 {true} + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: _col0 (type: int), _col1 (type: int), 1 (type: int), _col2 (type: int), 0 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key PREHOOK: type: QUERY @@ -87,6 +542,83 @@ POSTHOOK: type: QUERY POSTHOOK: Input: _dummy_database@_dummy_table #### A masked pattern was here #### 11 1 1 0 0 +Warning: Map Join MAPJOIN[9][bigTable=?] 
in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key +PREHOOK: type: QUERY +POSTHOOK: query: explain +select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_0:_dummy_table + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_0:_dummy_table + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 11 (type: int), 1 (type: int), 0 (type: int) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: COMPLETE + HashTable Sink Operator + filter predicates: + 0 + 1 {true} + keys: + 0 + 1 + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: _dummy_table + Row Limit Per Split: 1 + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE + Map Join Operator + condition map: + Right Outer Join 0 to 1 + filter predicates: + 0 + 1 {true} + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: _col0 (type: int), _col1 (type: int), 1 (type: int), _col2 (type: int), 0 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a full outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key PREHOOK: type: QUERY
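The contrast in the plans above is the point of this golden-file churn: both sides of the confuse_you join reduce to the literal key 11, so constant folding leaves the join with empty key lists, yet the LEFT and RIGHT OUTER variants still convert to a single broadcast Map Join whose filter predicates ({true} on the preserved side) keep unmatched rows, while the keyless FULL OUTER variant keeps falling back to a cross-product Shuffle Join. The RIGHT OUTER query is explained twice with an identical plan, presumably because the test re-runs it under a different hive.mapjoin.full.outer setting and that flag does not affect non-FULL joins. A minimal sketch of the knobs this patch adds, assuming an MR-mode session like these tests (the query is abbreviated from the one above):

set hive.mapjoin.full.outer=true;
set hive.test.mapjoin.full.outer.override=none;
explain
select a.key, a.a_one, b.b_one
from ( SELECT 11 key, 1 a_one ) a
full outer join ( SELECT 11 key, 1 b_one ) b on a.key = b.key;

On this MR path the FULL OUTER case still plans as a Shuffle Join; the conversion can only kick in where real equi-join keys survive optimization.

diff --git ql/src/test/results/clientpositive/mapjoin46.q.out ql/src/test/results/clientpositive/mapjoin46.q.out index febb6c7940..b6f8b195a2 100644 --- ql/src/test/results/clientpositive/mapjoin46.q.out +++ ql/src/test/results/clientpositive/mapjoin46.q.out @@ -124,14 +124,14 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n4 POSTHOOK: Input: default@test2_n2 #### A masked pattern was here #### -NULL NULL None NULL NULL NULL +100 1 Bob NULL NULL NULL +101 2 Car 102 2 Del +101 2 Car 103 2 Ema 98 NULL None NULL NULL NULL 99 0 Alice NULL NULL NULL 99 2 Mat 102 2 Del 99 2 Mat 103 2 Ema -100 1 Bob NULL NULL NULL -101 2 Car 102 2 Del -101 2 Car 103 2 Ema +NULL NULL None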
NULL NULL NULL PREHOOK: query: EXPLAIN SELECT * FROM test1_n4 LEFT OUTER JOIN test2_n2 @@ -234,12 +234,12 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n4 POSTHOOK: Input: default@test2_n2 #### A masked pattern was here #### -NULL NULL None NULL NULL NULL +100 1 Bob NULL NULL NULL +101 2 Car 102 2 Del 98 NULL None NULL NULL NULL 99 0 Alice NULL NULL NULL 99 2 Mat NULL NULL NULL -100 1 Bob NULL NULL NULL -101 2 Car 102 2 Del +NULL NULL None NULL NULL NULL Warning: Map Join MAPJOIN[11][bigTable=?] in task 'Stage-3:MAPRED' is a cross product PREHOOK: query: EXPLAIN SELECT * @@ -340,12 +340,12 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n4 POSTHOOK: Input: default@test2_n2 #### A masked pattern was here #### -NULL NULL None NULL NULL NULL +100 1 Bob 102 2 Del +101 2 Car 102 2 Del 98 NULL None NULL NULL NULL 99 0 Alice NULL NULL NULL 99 2 Mat NULL NULL NULL -100 1 Bob 102 2 Del -101 2 Car 102 2 Del +NULL NULL None NULL NULL NULL PREHOOK: query: EXPLAIN SELECT * FROM test1_n4 RIGHT OUTER JOIN test2_n2 @@ -430,10 +430,10 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n4 POSTHOOK: Input: default@test2_n2 #### A masked pattern was here #### -99 2 Mat 102 2 Del 101 2 Car 102 2 Del -99 2 Mat 103 2 Ema 101 2 Car 103 2 Ema +99 2 Mat 102 2 Del +99 2 Mat 103 2 Ema NULL NULL NULL 104 3 Fli NULL NULL NULL 105 NULL None Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product @@ -528,10 +528,6 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n4 POSTHOOK: Input: default@test2_n2 #### A masked pattern was here #### -NULL NULL None NULL NULL NULL -98 NULL None NULL NULL NULL -99 0 Alice NULL NULL NULL -99 2 Mat NULL NULL NULL 100 1 Bob 102 2 Del 100 1 Bob 103 2 Ema 100 1 Bob 104 3 Fli @@ -540,6 +536,10 @@ NULL NULL None NULL NULL NULL 101 2 Car 103 2 Ema 101 2 Car 104 3 Fli 101 2 Car 105 NULL None +98 NULL None NULL NULL NULL +99 0 Alice NULL NULL NULL +99 2 Mat NULL NULL NULL +NULL NULL None NULL NULL NULL Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product PREHOOK: query: EXPLAIN SELECT * @@ -635,11 +635,6 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n4 POSTHOOK: Input: default@test2_n2 #### A masked pattern was here #### -NULL NULL None 102 2 Del -98 NULL None 102 2 Del -99 0 Alice 102 2 Del -99 2 Mat 102 2 Del -99 2 Mat 103 2 Ema 100 1 Bob 102 2 Del 100 1 Bob 103 2 Ema 100 1 Bob 104 3 Fli @@ -648,6 +643,11 @@ NULL NULL None 102 2 Del 101 2 Car 103 2 Ema 101 2 Car 104 3 Fli 101 2 Car 105 NULL None +98 NULL None 102 2 Del +99 0 Alice 102 2 Del +99 2 Mat 102 2 Del +99 2 Mat 103 2 Ema +NULL NULL None 102 2 Del Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product PREHOOK: query: EXPLAIN SELECT * @@ -739,11 +739,6 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n4 POSTHOOK: Input: default@test2_n2 #### A masked pattern was here #### -NULL NULL None NULL NULL NULL -98 NULL None NULL NULL NULL -99 0 Alice NULL NULL NULL -99 2 Mat 102 2 Del -99 2 Mat 103 2 Ema 100 1 Bob 102 2 Del 100 1 Bob 103 2 Ema 100 1 Bob 104 3 Fli @@ -752,6 +747,11 @@ NULL NULL None NULL NULL NULL 101 2 Car 103 2 Ema 101 2 Car 104 3 Fli 101 2 Car 105 NULL None +98 NULL None NULL NULL NULL +99 0 Alice NULL NULL NULL +99 2 Mat 102 2 Del +99 2 Mat 103 2 Ema +NULL NULL None NULL NULL NULL Warning: Map Join MAPJOIN[9][bigTable=?] 
in task 'Stage-3:MAPRED' is a cross product PREHOOK: query: EXPLAIN SELECT * @@ -843,14 +843,14 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n4 POSTHOOK: Input: default@test2_n2 #### A masked pattern was here #### -NULL NULL None 102 2 Del +100 1 Bob 102 2 Del +101 2 Car 102 2 Del +101 2 Car 103 2 Ema 98 NULL None 102 2 Del 99 0 Alice 102 2 Del 99 2 Mat 102 2 Del 99 2 Mat 103 2 Ema -100 1 Bob 102 2 Del -101 2 Car 102 2 Del -101 2 Car 103 2 Ema +NULL NULL None 102 2 Del PREHOOK: query: EXPLAIN SELECT * FROM test1_n4 LEFT OUTER JOIN test2_n2 @@ -944,13 +944,13 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n4 POSTHOOK: Input: default@test2_n2 #### A masked pattern was here #### -NULL NULL None NULL NULL NULL -98 NULL None NULL NULL NULL -99 0 Alice NULL NULL NULL -99 2 Mat 102 2 Del 100 1 Bob NULL NULL NULL 101 2 Car 102 2 Del 101 2 Car 103 2 Ema +98 NULL None NULL NULL NULL +99 0 Alice NULL NULL NULL +99 2 Mat 102 2 Del +NULL NULL None NULL NULL NULL Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product PREHOOK: query: EXPLAIN SELECT * @@ -1046,19 +1046,19 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n4 POSTHOOK: Input: default@test2_n2 #### A masked pattern was here #### -NULL NULL None 102 2 Del -98 NULL None 102 2 Del -99 0 Alice 102 2 Del -99 2 Mat 102 2 Del 100 1 Bob 102 2 Del -101 2 Car 102 2 Del -99 2 Mat 103 2 Ema 100 1 Bob 103 2 Ema -101 2 Car 103 2 Ema 100 1 Bob 104 3 Fli -101 2 Car 104 3 Fli 100 1 Bob 105 NULL None +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +101 2 Car 104 3 Fli 101 2 Car 105 NULL None +98 NULL None 102 2 Del +99 0 Alice 102 2 Del +99 2 Mat 102 2 Del +99 2 Mat 103 2 Ema +NULL NULL None 102 2 Del Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product PREHOOK: query: EXPLAIN SELECT * @@ -1150,16 +1150,16 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n4 POSTHOOK: Input: default@test2_n2 #### A masked pattern was here #### -99 2 Mat 102 2 Del 100 1 Bob 102 2 Del -101 2 Car 102 2 Del -99 2 Mat 103 2 Ema 100 1 Bob 103 2 Ema -101 2 Car 103 2 Ema 100 1 Bob 104 3 Fli -101 2 Car 104 3 Fli 100 1 Bob 105 NULL None +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +101 2 Car 104 3 Fli 101 2 Car 105 NULL None +99 2 Mat 102 2 Del +99 2 Mat 103 2 Ema Warning: Map Join MAPJOIN[9][bigTable=?] 
in task 'Stage-3:MAPRED' is a cross product PREHOOK: query: EXPLAIN SELECT * @@ -1251,16 +1251,16 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n4 POSTHOOK: Input: default@test2_n2 #### A masked pattern was here #### -NULL NULL None 102 2 Del +100 1 Bob 102 2 Del +101 2 Car 102 2 Del +101 2 Car 103 2 Ema 98 NULL None 102 2 Del 99 0 Alice 102 2 Del 99 2 Mat 102 2 Del -100 1 Bob 102 2 Del -101 2 Car 102 2 Del 99 2 Mat 103 2 Ema -101 2 Car 103 2 Ema NULL NULL NULL 104 3 Fli NULL NULL NULL 105 NULL None +NULL NULL None 102 2 Del PREHOOK: query: EXPLAIN SELECT * FROM test1_n4 RIGHT OUTER JOIN test2_n2 @@ -1354,9 +1354,9 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n4 POSTHOOK: Input: default@test2_n2 #### A masked pattern was here #### -99 2 Mat 102 2 Del 101 2 Car 102 2 Del 101 2 Car 103 2 Ema +99 2 Mat 102 2 Del NULL NULL NULL 104 3 Fli NULL NULL NULL 105 NULL None Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product @@ -1407,7 +1407,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 1 @@ -1447,31 +1447,33 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n4 POSTHOOK: Input: default@test2_n2 #### A masked pattern was here #### -101 2 Car 105 NULL None -101 2 Car 104 3 Fli -101 2 Car 103 2 Ema -101 2 Car 102 2 Del -100 1 Bob 105 NULL None -100 1 Bob 104 3 Fli -100 1 Bob 103 2 Ema 100 1 Bob 102 2 Del -99 2 Mat 103 2 Ema -99 2 Mat 102 2 Del -99 0 Alice 102 2 Del +100 1 Bob 103 2 Ema +100 1 Bob 104 3 Fli +100 1 Bob 105 NULL None +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +101 2 Car 104 3 Fli +101 2 Car 105 NULL None 98 NULL None 102 2 Del +99 0 Alice 102 2 Del +99 2 Mat 102 2 Del +99 2 Mat 103 2 Ema NULL NULL None 102 2 Del Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product PREHOOK: query: EXPLAIN SELECT * FROM test1_n4 FULL OUTER JOIN test2_n2 ON (test1_n4.value=test2_n2.value - OR test1_n4.key between 100 and 102) + OR test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT * FROM test1_n4 FULL OUTER JOIN test2_n2 ON (test1_n4.value=test2_n2.value - OR test1_n4.key between 100 and 102) + OR test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102) POSTHOOK: type: QUERY STAGE DEPENDENCIES: Stage-1 is a root stage @@ -1506,12 +1508,12 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 1 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)} + residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)} Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -1531,7 +1533,8 @@ Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAP PREHOOK: query: SELECT * FROM test1_n4 FULL OUTER JOIN test2_n2 ON (test1_n4.value=test2_n2.value - OR test1_n4.key between 100 and 102) + OR test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102) PREHOOK: type: QUERY PREHOOK: Input: default@test1_n4 PREHOOK: Input: default@test2_n2 @@ -1539,36 +1542,37 @@ PREHOOK: Input: default@test2_n2 POSTHOOK: query: SELECT * FROM test1_n4 FULL OUTER JOIN test2_n2 ON (test1_n4.value=test2_n2.value - OR test1_n4.key between 100 and 102) + OR 
test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102) POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n4 POSTHOOK: Input: default@test2_n2 #### A masked pattern was here #### -101 2 Car 105 NULL None -101 2 Car 104 3 Fli -101 2 Car 103 2 Ema -101 2 Car 102 2 Del -100 1 Bob 105 NULL None -100 1 Bob 104 3 Fli -100 1 Bob 103 2 Ema 100 1 Bob 102 2 Del -99 2 Mat 103 2 Ema +100 1 Bob 103 2 Ema +100 1 Bob 104 3 Fli +100 1 Bob 105 NULL None +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +101 2 Car 104 3 Fli +101 2 Car 105 NULL None +98 NULL None 102 2 Del +99 0 Alice 102 2 Del 99 2 Mat 102 2 Del -99 0 Alice NULL NULL NULL -98 NULL None NULL NULL NULL -NULL NULL None NULL NULL NULL +99 2 Mat 103 2 Ema +NULL NULL None 102 2 Del Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product PREHOOK: query: EXPLAIN SELECT * FROM test1_n4 FULL OUTER JOIN test2_n2 ON (test1_n4.value=test2_n2.value - OR test2_n2.key between 100 and 102) + OR test1_n4.key between 100 and 102) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT * FROM test1_n4 FULL OUTER JOIN test2_n2 ON (test1_n4.value=test2_n2.value - OR test2_n2.key between 100 and 102) + OR test1_n4.key between 100 and 102) POSTHOOK: type: QUERY STAGE DEPENDENCIES: Stage-1 is a root stage @@ -1603,12 +1607,12 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 1 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)} + residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)} Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false @@ -1628,7 +1632,7 @@ Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAP PREHOOK: query: SELECT * FROM test1_n4 FULL OUTER JOIN test2_n2 ON (test1_n4.value=test2_n2.value - OR test2_n2.key between 100 and 102) + OR test1_n4.key between 100 and 102) PREHOOK: type: QUERY PREHOOK: Input: default@test1_n4 PREHOOK: Input: default@test2_n2 @@ -1636,34 +1640,36 @@ PREHOOK: Input: default@test2_n2 POSTHOOK: query: SELECT * FROM test1_n4 FULL OUTER JOIN test2_n2 ON (test1_n4.value=test2_n2.value - OR test2_n2.key between 100 and 102) + OR test1_n4.key between 100 and 102) POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n4 POSTHOOK: Input: default@test2_n2 #### A masked pattern was here #### -101 2 Car 103 2 Ema -101 2 Car 102 2 Del 100 1 Bob 102 2 Del -99 2 Mat 103 2 Ema +100 1 Bob 103 2 Ema +100 1 Bob 104 3 Fli +100 1 Bob 105 NULL None +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +101 2 Car 104 3 Fli +101 2 Car 105 NULL None +98 NULL None NULL NULL NULL +99 0 Alice NULL NULL NULL 99 2 Mat 102 2 Del -99 0 Alice 102 2 Del -98 NULL None 102 2 Del -NULL NULL None 102 2 Del -NULL NULL NULL 105 NULL None -NULL NULL NULL 104 3 Fli +99 2 Mat 103 2 Ema +NULL NULL None NULL NULL NULL +Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product PREHOOK: query: EXPLAIN SELECT * FROM test1_n4 FULL OUTER JOIN test2_n2 ON (test1_n4.value=test2_n2.value - AND (test1_n4.key between 100 and 102 - OR test2_n2.key between 100 and 102)) + OR test1_n4.key between 100 and 102) PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT * FROM test1_n4 FULL OUTER JOIN test2_n2 ON (test1_n4.value=test2_n2.value - AND (test1_n4.key between 100 and 102 - OR test2_n2.key between 100 and 102)) + OR 
test1_n4.key between 100 and 102) POSTHOOK: type: QUERY STAGE DEPENDENCIES: Stage-1 is a root stage @@ -1681,11 +1687,9 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: _col1 (type: int) - sort order: + - Map-reduce partition columns: _col1 (type: int) + sort order: Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE - value expressions: _col0 (type: int), _col2 (type: string) + value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string) TableScan alias: test2_n2 Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE @@ -1694,24 +1698,22 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2 Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator - key expressions: _col1 (type: int) - sort order: + - Map-reduce partition columns: _col1 (type: int) + sort order: Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE - value expressions: _col0 (type: int), _col2 (type: string) + value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string) Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: - 0 _col1 (type: int) - 1 _col1 (type: int) + 0 + 1 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)} - Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE + residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)} + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1723,11 +1725,11 @@ STAGE PLANS: Processor Tree: ListSink +Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product PREHOOK: query: SELECT * FROM test1_n4 FULL OUTER JOIN test2_n2 ON (test1_n4.value=test2_n2.value - AND (test1_n4.key between 100 and 102 - OR test2_n2.key between 100 and 102)) + OR test1_n4.key between 100 and 102) PREHOOK: type: QUERY PREHOOK: Input: default@test1_n4 PREHOOK: Input: default@test2_n2 @@ -1735,37 +1737,426 @@ PREHOOK: Input: default@test2_n2 POSTHOOK: query: SELECT * FROM test1_n4 FULL OUTER JOIN test2_n2 ON (test1_n4.value=test2_n2.value - AND (test1_n4.key between 100 and 102 - OR test2_n2.key between 100 and 102)) + OR test1_n4.key between 100 and 102) POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n4 POSTHOOK: Input: default@test2_n2 #### A masked pattern was here #### +100 1 Bob 102 2 Del +100 1 Bob 103 2 Ema +100 1 Bob 104 3 Fli +100 1 Bob 105 NULL None +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +101 2 Car 104 3 Fli +101 2 Car 105 NULL None 98 NULL None NULL NULL NULL -NULL NULL None NULL NULL NULL -NULL NULL NULL 105 NULL None 99 0 Alice NULL NULL NULL -100 1 Bob NULL NULL NULL -101 2 Car 103 2 Ema -101 2 Car 102 2 Del 99 2 Mat 102 2 Del -NULL NULL NULL 104 3 Fli -Warning: Shuffle Join JOIN[17][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Stage-2:MAPRED' is a cross product +99 2 Mat 103 2 Ema +NULL NULL None 
NULL NULL NULL +Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product PREHOOK: query: EXPLAIN SELECT * -FROM ( - SELECT test1_n4.key AS key1, test1_n4.value AS value1, test1_n4.col_1 AS col_1, - test2_n2.key AS key2, test2_n2.value AS value2, test2_n2.col_2 AS col_2 - FROM test1_n4 RIGHT OUTER JOIN test2_n2 - ON (test1_n4.value=test2_n2.value - AND (test1_n4.key between 100 and 102 - OR test2_n2.key between 100 and 102)) - ) sq1 -FULL OUTER JOIN ( - SELECT test1_n4.key AS key3, test1_n4.value AS value3, test1_n4.col_1 AS col_3, - test2_n2.key AS key4, test2_n2.value AS value4, test2_n2.col_2 AS col_4 - FROM test1_n4 LEFT OUTER JOIN test2_n2 - ON (test1_n4.value=test2_n2.value +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + OR test2_n2.key between 100 and 102) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + OR test2_n2.key between 100 and 102) +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: test1_n4 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string) + TableScan + alias: test2_n2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string) + Reduce Operator Tree: + Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)} + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + OR test2_n2.key between 100 and 102) +PREHOOK: type: QUERY +PREHOOK: Input: default@test1_n4 +PREHOOK: Input: default@test2_n2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + OR test2_n2.key between 100 and 102) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test1_n4 
+POSTHOOK: Input: default@test2_n2 +#### A masked pattern was here #### +100 1 Bob 102 2 Del +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +98 NULL None 102 2 Del +99 0 Alice 102 2 Del +99 2 Mat 102 2 Del +99 2 Mat 103 2 Ema +NULL NULL NULL 104 3 Fli +NULL NULL NULL 105 NULL None +NULL NULL None 102 2 Del +Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: EXPLAIN +SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + OR test2_n2.key between 100 and 102) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + OR test2_n2.key between 100 and 102) +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: test1_n4 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string) + TableScan + alias: test2_n2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string) + Reduce Operator Tree: + Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)} + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + OR test2_n2.key between 100 and 102) +PREHOOK: type: QUERY +PREHOOK: Input: default@test1_n4 +PREHOOK: Input: default@test2_n2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + OR test2_n2.key between 100 and 102) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test1_n4 +POSTHOOK: Input: default@test2_n2 +#### A masked pattern was here #### +100 1 Bob 102 2 Del +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +98 NULL None 102 2 Del +99 0 Alice 102 2 Del +99 2 Mat 102 2 Del +99 2 Mat 103 2 Ema +NULL NULL NULL 104 3 Fli +NULL NULL NULL 105 NULL None +NULL NULL None 102 2 
Del +PREHOOK: query: EXPLAIN +SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)) +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: test1_n4 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col2 (type: string) + TableScan + alias: test2_n2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col2 (type: string) + Reduce Operator Tree: + Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col1 (type: int) + 1 _col1 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)} + Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)) +PREHOOK: type: QUERY +PREHOOK: Input: default@test1_n4 +PREHOOK: Input: default@test2_n2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test1_n4 +POSTHOOK: Input: default@test2_n2 +#### A masked pattern was here #### +100 1 Bob NULL NULL NULL +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +98 NULL None NULL NULL NULL +99 0 Alice NULL NULL NULL +99 2 Mat 102 2 Del +NULL NULL NULL 104 3 Fli +NULL NULL NULL 105 NULL None +NULL NULL None NULL NULL NULL +PREHOOK: query: EXPLAIN +SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR 
test2_n2.key between 100 and 102)) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)) +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: test1_n4 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col2 (type: string) + TableScan + alias: test2_n2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col2 (type: string) + Reduce Operator Tree: + Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col1 (type: int) + 1 _col1 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)} + Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)) +PREHOOK: type: QUERY +PREHOOK: Input: default@test1_n4 +PREHOOK: Input: default@test2_n2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * +FROM test1_n4 FULL OUTER JOIN test2_n2 +ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test1_n4 +POSTHOOK: Input: default@test2_n2 +#### A masked pattern was here #### +100 1 Bob NULL NULL NULL +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +98 NULL None NULL NULL NULL +99 0 Alice NULL NULL NULL +99 2 Mat 102 2 Del +NULL NULL NULL 104 3 Fli +NULL NULL NULL 105 NULL None +NULL NULL None NULL NULL NULL +Warning: Shuffle Join JOIN[17][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Stage-2:MAPRED' is a cross product +PREHOOK: query: EXPLAIN +SELECT * +FROM ( + SELECT test1_n4.key AS key1, test1_n4.value AS value1, test1_n4.col_1 AS col_1, + test2_n2.key AS key2, test2_n2.value AS value2, test2_n2.col_2 AS col_2 + 
FROM test1_n4 RIGHT OUTER JOIN test2_n2 + ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)) + ) sq1 +FULL OUTER JOIN ( + SELECT test1_n4.key AS key3, test1_n4.value AS value3, test1_n4.col_1 AS col_3, + test2_n2.key AS key4, test2_n2.value AS value4, test2_n2.col_2 AS col_4 + FROM test1_n4 LEFT OUTER JOIN test2_n2 + ON (test1_n4.value=test2_n2.value AND (test1_n4.key between 100 and 102 OR test2_n2.key between 100 and 102)) ) sq2 @@ -1880,7 +2271,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 1 @@ -1946,23 +2337,239 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@test1_n4 POSTHOOK: Input: default@test2_n2 #### A masked pattern was here #### -NULL NULL NULL 105 NULL None 101 2 Car 103 2 Ema -NULL NULL NULL 105 NULL None 101 2 Car 102 2 Del -NULL NULL NULL 105 NULL None 100 1 Bob NULL NULL NULL -NULL NULL NULL 105 NULL None 99 2 Mat 102 2 Del -NULL NULL NULL 105 NULL None 99 0 Alice NULL NULL NULL -NULL NULL NULL 105 NULL None 98 NULL None NULL NULL NULL -NULL NULL NULL 105 NULL None NULL NULL None NULL NULL NULL -NULL NULL NULL 104 3 Fli 101 2 Car 103 2 Ema -NULL NULL NULL 104 3 Fli 101 2 Car 102 2 Del +101 2 Car 102 2 Del 100 1 Bob NULL NULL NULL +101 2 Car 102 2 Del 99 0 Alice NULL NULL NULL +101 2 Car 103 2 Ema 100 1 Bob NULL NULL NULL +101 2 Car 103 2 Ema 99 0 Alice NULL NULL NULL +99 2 Mat 102 2 Del 100 1 Bob NULL NULL NULL +99 2 Mat 102 2 Del 99 0 Alice NULL NULL NULL NULL NULL NULL 104 3 Fli 100 1 Bob NULL NULL NULL -NULL NULL NULL 104 3 Fli 99 2 Mat 102 2 Del -NULL NULL NULL 104 3 Fli 99 0 Alice NULL NULL NULL +NULL NULL NULL 104 3 Fli 101 2 Car 102 2 Del +NULL NULL NULL 104 3 Fli 101 2 Car 103 2 Ema NULL NULL NULL 104 3 Fli 98 NULL None NULL NULL NULL +NULL NULL NULL 104 3 Fli 99 0 Alice NULL NULL NULL +NULL NULL NULL 104 3 Fli 99 2 Mat 102 2 Del NULL NULL NULL 104 3 Fli NULL NULL None NULL NULL NULL -101 2 Car 103 2 Ema 100 1 Bob NULL NULL NULL -101 2 Car 103 2 Ema 99 0 Alice NULL NULL NULL +NULL NULL NULL 105 NULL None 100 1 Bob NULL NULL NULL +NULL NULL NULL 105 NULL None 101 2 Car 102 2 Del +NULL NULL NULL 105 NULL None 101 2 Car 103 2 Ema +NULL NULL NULL 105 NULL None 98 NULL None NULL NULL NULL +NULL NULL NULL 105 NULL None 99 0 Alice NULL NULL NULL +NULL NULL NULL 105 NULL None 99 2 Mat 102 2 Del +NULL NULL NULL 105 NULL None NULL NULL None NULL NULL NULL +Warning: Shuffle Join JOIN[17][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Stage-2:MAPRED' is a cross product +PREHOOK: query: EXPLAIN +SELECT * +FROM ( + SELECT test1_n4.key AS key1, test1_n4.value AS value1, test1_n4.col_1 AS col_1, + test2_n2.key AS key2, test2_n2.value AS value2, test2_n2.col_2 AS col_2 + FROM test1_n4 RIGHT OUTER JOIN test2_n2 + ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)) + ) sq1 +FULL OUTER JOIN ( + SELECT test1_n4.key AS key3, test1_n4.value AS value3, test1_n4.col_1 AS col_3, + test2_n2.key AS key4, test2_n2.value AS value4, test2_n2.col_2 AS col_4 + FROM test1_n4 LEFT OUTER JOIN test2_n2 + ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)) + ) sq2 +ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT * +FROM ( + SELECT test1_n4.key AS key1, test1_n4.value AS value1, test1_n4.col_1 AS col_1, + test2_n2.key AS key2, test2_n2.value AS value2, 
test2_n2.col_2 AS col_2 + FROM test1_n4 RIGHT OUTER JOIN test2_n2 + ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)) + ) sq1 +FULL OUTER JOIN ( + SELECT test1_n4.key AS key3, test1_n4.value AS value3, test1_n4.col_1 AS col_3, + test2_n2.key AS key4, test2_n2.value AS value4, test2_n2.col_2 AS col_4 + FROM test1_n4 LEFT OUTER JOIN test2_n2 + ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)) + ) sq2 +ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2) +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-8 is a root stage + Stage-2 depends on stages: Stage-8 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-8 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_0:test1_n4 + Fetch Operator + limit: -1 + $hdt$_2:$hdt$_3:test2_n2 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_0:test1_n4 + TableScan + alias: test1_n4 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + keys: + 0 _col1 (type: int) + 1 _col1 (type: int) + $hdt$_2:$hdt$_3:test2_n2 + TableScan + alias: test2_n2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + keys: + 0 _col1 (type: int) + 1 _col1 (type: int) + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + alias: test2_n2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Right Outer Join 0 to 1 + keys: + 0 _col1 (type: int) + 1 _col1 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)} + Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string), _col3 (type: int), _col4 (type: int), _col5 (type: string) + TableScan + alias: test1_n4 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Outer Join 0 to 1 + keys: + 0 _col1 (type: int) + 1 _col1 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)} + Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 6 Data size: 61 
Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string), _col3 (type: int), _col4 (type: int), _col5 (type: string) + Local Work: + Map Reduce Local Work + Reduce Operator Tree: + Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11 + residual filter predicates: {(_col1 is null or (_col10 is null and (_col7 <> _col4)))} + Statistics: Num rows: 36 Data size: 768 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 36 Data size: 768 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Shuffle Join JOIN[17][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Stage-2:MAPRED' is a cross product +PREHOOK: query: SELECT * +FROM ( + SELECT test1_n4.key AS key1, test1_n4.value AS value1, test1_n4.col_1 AS col_1, + test2_n2.key AS key2, test2_n2.value AS value2, test2_n2.col_2 AS col_2 + FROM test1_n4 RIGHT OUTER JOIN test2_n2 + ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)) + ) sq1 +FULL OUTER JOIN ( + SELECT test1_n4.key AS key3, test1_n4.value AS value3, test1_n4.col_1 AS col_3, + test2_n2.key AS key4, test2_n2.value AS value4, test2_n2.col_2 AS col_4 + FROM test1_n4 LEFT OUTER JOIN test2_n2 + ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)) + ) sq2 +ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2) +PREHOOK: type: QUERY +PREHOOK: Input: default@test1_n4 +PREHOOK: Input: default@test2_n2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * +FROM ( + SELECT test1_n4.key AS key1, test1_n4.value AS value1, test1_n4.col_1 AS col_1, + test2_n2.key AS key2, test2_n2.value AS value2, test2_n2.col_2 AS col_2 + FROM test1_n4 RIGHT OUTER JOIN test2_n2 + ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)) + ) sq1 +FULL OUTER JOIN ( + SELECT test1_n4.key AS key3, test1_n4.value AS value3, test1_n4.col_1 AS col_3, + test2_n2.key AS key4, test2_n2.value AS value4, test2_n2.col_2 AS col_4 + FROM test1_n4 LEFT OUTER JOIN test2_n2 + ON (test1_n4.value=test2_n2.value + AND (test1_n4.key between 100 and 102 + OR test2_n2.key between 100 and 102)) + ) sq2 +ON (sq1.value1 is null or sq2.value4 is null and sq2.value3 != sq1.value2) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test1_n4 +POSTHOOK: Input: default@test2_n2 +#### A masked pattern was here #### 101 2 Car 102 2 Del 100 1 Bob NULL NULL NULL 101 2 Car 102 2 Del 99 0 Alice NULL NULL NULL +101 2 Car 103 2 Ema 100 1 Bob NULL NULL NULL +101 2 Car 103 2 Ema 99 0 Alice NULL NULL NULL 99 2 Mat 102 2 Del 100 1 Bob NULL NULL NULL 99 2 Mat 102 2 Del 99 0 Alice NULL NULL NULL +NULL NULL NULL 104 3 Fli 100 1 Bob NULL NULL NULL +NULL NULL NULL 104 3 Fli 101 2 Car 102 2 Del +NULL NULL NULL 104 3 Fli 101 2 Car 103 2 Ema +NULL NULL NULL 104 3 Fli 98 NULL None NULL NULL NULL +NULL NULL NULL 104 3 Fli 99 0 Alice NULL NULL NULL +NULL NULL NULL 104 3 Fli 99 2 Mat 102 2 Del +NULL NULL NULL 104 3 Fli NULL NULL None NULL NULL NULL +NULL 
NULL NULL 105 NULL None 100 1 Bob NULL NULL NULL +NULL NULL NULL 105 NULL None 101 2 Car 102 2 Del +NULL NULL NULL 105 NULL None 101 2 Car 103 2 Ema +NULL NULL NULL 105 NULL None 98 NULL None NULL NULL NULL +NULL NULL NULL 105 NULL None 99 0 Alice NULL NULL NULL +NULL NULL NULL 105 NULL None 99 2 Mat 102 2 Del +NULL NULL NULL 105 NULL None NULL NULL None NULL NULL NULL
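Two kinds of change run through the mapjoin46.q.out hunks above. A few hunks rewrite the test queries themselves (the residual OR predicate now exercises one side, the other, or both), and their result blocks change accordingly; the rest are pure row-order churn, where each -/+ pair carries the same multiset of rows with the NULL-keyed rows now sorted to the end. The files that follow carry the renamed plan label: a full outer condition map now prints as 'Full Outer Join 0 to 1' rather than the ambiguous 'Outer Join 0 to 1'. The recurring shape, distilled from the queries above (test1_n4/test2_n2 are the test tables):

EXPLAIN
SELECT *
FROM test1_n4 FULL OUTER JOIN test2_n2
ON (test1_n4.value = test2_n2.value
    OR test1_n4.key BETWEEN 100 AND 102);

Because the ON clause is a disjunction, no equi-keys remain; the plan is a Full Outer Join with empty keys plus residual filter predicates, which is exactly what the cross-product warnings flag.

diff --git ql/src/test/results/clientpositive/mapjoin47.q.out ql/src/test/results/clientpositive/mapjoin47.q.out index 172d160772..c42094d785 100644 --- ql/src/test/results/clientpositive/mapjoin47.q.out +++ ql/src/test/results/clientpositive/mapjoin47.q.out @@ -1415,7 +1415,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) @@ -1521,7 +1521,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) diff --git ql/src/test/results/clientpositive/mergejoin.q.out ql/src/test/results/clientpositive/mergejoin.q.out index 7cbcbbe9a8..95b961f4ab 100644 --- ql/src/test/results/clientpositive/mergejoin.q.out +++ ql/src/test/results/clientpositive/mergejoin.q.out @@ -1706,7 +1706,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: int) 1 _col0 (type: int) diff --git ql/src/test/results/clientpositive/mergejoins_mixed.q.out ql/src/test/results/clientpositive/mergejoins_mixed.q.out index 9ac6d59fc7..4d94085aee 100644 --- ql/src/test/results/clientpositive/mergejoins_mixed.q.out +++ ql/src/test/results/clientpositive/mergejoins_mixed.q.out @@ -820,7 +820,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col3 (type: string) 1 _col0 (type: string) @@ -859,7 +859,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) @@ -1161,7 +1161,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) @@ -1463,7 +1463,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) diff --git ql/src/test/results/clientpositive/offset_limit_global_optimizer.q.out ql/src/test/results/clientpositive/offset_limit_global_optimizer.q.out index f7475f5984..ba88ccb714 100644 --- ql/src/test/results/clientpositive/offset_limit_global_optimizer.q.out +++ ql/src/test/results/clientpositive/offset_limit_global_optimizer.q.out @@ -27,7 +27,7 @@ STAGE PLANS: Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string) - null sort order: aaaa + null sort order: zzzz sort order: ++++ Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -333,7 +333,7 @@ STAGE PLANS: Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string) - null sort order: aaaa + null sort order: zzzz sort order: ++++ Statistics: Num rows: 2000 Data size: 21248 Basic stats: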
COMPLETE Column stats: NONE tag: -1 @@ -639,7 +639,7 @@ STAGE PLANS: Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string) - null sort order: aaaa + null sort order: zzzz sort order: ++++ Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -955,7 +955,7 @@ STAGE PLANS: Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string) - null sort order: aaaa + null sort order: zzzz sort order: ++++ Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -1846,7 +1846,7 @@ STAGE PLANS: Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string) - null sort order: aaaa + null sort order: zzzz sort order: ++++ Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -2147,7 +2147,7 @@ STAGE PLANS: Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string) - null sort order: aaaa + null sort order: zzzz sort order: ++++ Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -2448,7 +2448,7 @@ STAGE PLANS: Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string) - null sort order: aaaa + null sort order: zzzz sort order: ++++ Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -2759,7 +2759,7 @@ STAGE PLANS: Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string) - null sort order: aaaa + null sort order: zzzz sort order: ++++ Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/optional_outer.q.out ql/src/test/results/clientpositive/optional_outer.q.out index 9ec1af7cba..efc952c8e8 100644 --- ql/src/test/results/clientpositive/optional_outer.q.out +++ ql/src/test/results/clientpositive/optional_outer.q.out @@ -283,7 +283,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) @@ -344,7 +344,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) diff --git ql/src/test/results/clientpositive/outer_reference_windowed.q.out ql/src/test/results/clientpositive/outer_reference_windowed.q.out index ece73a1f50..753073321d 100644 --- ql/src/test/results/clientpositive/outer_reference_windowed.q.out +++ ql/src/test/results/clientpositive/outer_reference_windowed.q.out @@ -339,7 +339,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col0 ASC 
NULLS FIRST + order by: _col0 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -513,7 +513,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col0 ASC NULLS FIRST + order by: _col0 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -691,7 +691,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col0 ASC NULLS FIRST + order by: _col0 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -846,7 +846,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/parquet_vectorization_0.q.out ql/src/test/results/clientpositive/parquet_vectorization_0.q.out index 4156c5d921..44253c6296 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_0.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_0.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT MIN(ctinyint) as c1, MAX(ctinyint), COUNT(ctinyint), @@ -6,7 +6,7 @@ SELECT MIN(ctinyint) as c1, FROM alltypesparquet ORDER BY c1 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT MIN(ctinyint) as c1, MAX(ctinyint), COUNT(ctinyint), @@ -32,7 +32,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Select Operator expressions: ctinyint (type: tinyint) outputColumnNames: ctinyint @@ -72,12 +71,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -101,7 +94,6 @@ STAGE PLANS: TableScan TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:_col0:tinyint, 1:_col1:tinyint, 2:_col2:bigint, 3:_col3:bigint] Reduce Output Operator key expressions: _col0 (type: tinyint) sort order: + @@ -122,12 +114,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 4 - includeColumns: [0, 1, 2, 3] - dataColumns: _col0:tinyint, _col1:tinyint, _col2:bigint, _col3:bigint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -170,12 +156,12 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### -64 62 9173 12288 -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(ctinyint) as c1 FROM alltypesparquet ORDER BY c1 
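Two spellings of the same change appear above: in reduce-sink output, null sort order uses one character per sort key (a for nulls-first, z for nulls-last), so aaaa becoming zzzz flips all four keys, while the windowing-table definitions state it explicitly as ASC NULLS LAST. This is consistent with the nulls-last default (hive.default.nulls.last) flipping to true in this patch; a hedged sketch of how a session can pin either behaviour:

    -- Session-level return to the old default (assumed override):
    SET hive.default.nulls.last=false;

    -- Or spell the ordering out per query, independent of any default:
    SELECT key, value FROM src ORDER BY key ASC NULLS LAST;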
PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(ctinyint) as c1 FROM alltypesparquet ORDER BY c1 @@ -198,7 +184,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Select Operator expressions: ctinyint (type: tinyint) outputColumnNames: ctinyint @@ -238,12 +223,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -267,7 +246,6 @@ STAGE PLANS: TableScan TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:_col0:bigint] Reduce Output Operator key expressions: _col0 (type: bigint) sort order: + @@ -287,12 +265,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - includeColumns: [0] - dataColumns: _col0:bigint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -486,7 +458,7 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### -4.344925324321378 1158.3003004768175 1158.3003004768175 1158.426587033782 34.03381113652741 34.03381113652741 34.03381113652741 34.03566639620535 -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT MIN(cbigint) as c1, MAX(cbigint), COUNT(cbigint), @@ -494,7 +466,7 @@ SELECT MIN(cbigint) as c1, FROM alltypesparquet ORDER BY c1 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT MIN(cbigint) as c1, MAX(cbigint), COUNT(cbigint), @@ -520,7 +492,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Select Operator expressions: cbigint (type: bigint) outputColumnNames: cbigint @@ -560,12 +531,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [3] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ 
-589,7 +554,6 @@ STAGE PLANS: TableScan TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:_col0:bigint, 1:_col1:bigint, 2:_col2:bigint, 3:_col3:bigint] Reduce Output Operator key expressions: _col0 (type: bigint) sort order: + @@ -610,12 +574,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 4 - includeColumns: [0, 1, 2, 3] - dataColumns: _col0:bigint, _col1:bigint, _col2:bigint, _col3:bigint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -658,12 +616,12 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### -2147311592 2145498388 9173 12288 -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(cbigint) as c1 FROM alltypesparquet ORDER BY c1 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(cbigint) as c1 FROM alltypesparquet ORDER BY c1 @@ -686,7 +644,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Select Operator expressions: cbigint (type: bigint) outputColumnNames: cbigint @@ -726,12 +683,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [3] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -755,7 +706,6 @@ STAGE PLANS: TableScan TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:_col0:bigint] Reduce Output Operator key expressions: _col0 (type: bigint) sort order: + @@ -775,12 +725,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - includeColumns: [0] - dataColumns: _col0:bigint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -974,7 +918,7 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### -1.8515862077935246E8 2.07689300543066035E18 2.07689300543066035E18 2.07711944383072922E18 1.441142951074133E9 1.441142951074133E9 1.441142951074133E9 1.4412215110213728E9 -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT MIN(cfloat) as c1, MAX(cfloat), COUNT(cfloat), @@ -982,7 +926,7 @@ SELECT MIN(cfloat) as c1, FROM alltypesparquet ORDER BY c1 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT MIN(cfloat) as c1, MAX(cfloat), COUNT(cfloat), @@ -1008,7 +952,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE 
Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Select Operator expressions: cfloat (type: float) outputColumnNames: cfloat @@ -1048,12 +991,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [4] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -1077,7 +1014,6 @@ STAGE PLANS: TableScan TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:_col0:float, 1:_col1:float, 2:_col2:bigint, 3:_col3:bigint] Reduce Output Operator key expressions: _col0 (type: float) sort order: + @@ -1098,12 +1034,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 4 - includeColumns: [0, 1, 2, 3] - dataColumns: _col0:float, _col1:float, _col2:bigint, _col3:bigint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -1146,12 +1076,12 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### -64.0 79.553 9173 12288 -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(cfloat) as c1 FROM alltypesparquet ORDER BY c1 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(cfloat) as c1 FROM alltypesparquet ORDER BY c1 @@ -1174,7 +1104,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Select Operator expressions: cfloat (type: float) outputColumnNames: cfloat @@ -1214,12 +1143,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [4] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -1243,7 +1166,6 @@ STAGE PLANS: TableScan TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:_col0:double] Reduce Output Operator key expressions: _col0 (type: double) sort order: + @@ -1263,12 +1185,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - includeColumns: [0] - dataColumns: 
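The parquet_vectorization_* golden files are regenerated here with EXPLAIN VECTORIZATION EXPRESSION instead of EXPLAIN VECTORIZATION DETAIL; the deleted vectorizationSchemaColumns and rowBatchContext sections are DETAIL-level output, which is why every hunk removes them without otherwise touching the operator plans. A sketch of the two levels, abbreviated to one aggregate over the alltypesparquet test table:

    -- EXPRESSION: operator vectorization classes and vector expressions.
    EXPLAIN VECTORIZATION EXPRESSION
    SELECT MIN(ctinyint) FROM alltypesparquet;

    -- DETAIL: additionally prints per-operator schema columns and
    -- row-batch context, the sections removed throughout these hunks.
    EXPLAIN VECTORIZATION DETAIL
    SELECT MIN(ctinyint) FROM alltypesparquet;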
_col0:double - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -1463,7 +1379,7 @@ POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### -4.303895780321011 1163.8972588605056 1163.8972588605056 1164.0241556397098 34.11593848717203 34.11593848717203 34.11593848717203 34.11779822379677 WARNING: Comparing a bigint and a double may result in a loss of precision. -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT AVG(cbigint), (-(AVG(cbigint))), (-6432 + AVG(cbigint)), @@ -1490,7 +1406,7 @@ WHERE (((cstring2 LIKE '%b%') AND ((cboolean2 = 1) AND (3569 = ctinyint)))) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT AVG(cbigint), (-(AVG(cbigint))), (-6432 + AVG(cbigint)), @@ -1535,7 +1451,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -1583,12 +1498,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 2, 3, 4, 5, 7, 11] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(13,3), double, double, double, double] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -30964,7 +30873,7 @@ STAGE PLANS: GatherStats: false Reduce Output Operator key expressions: _col1 (type: string) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 6144 Data size: 73728 Basic stats: COMPLETE Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/parquet_vectorization_1.q.out ql/src/test/results/clientpositive/parquet_vectorization_1.q.out index b72982c1f8..3a22da6f7c 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_1.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_1.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT VAR_POP(ctinyint), (VAR_POP(ctinyint) / -26.28), SUM(cfloat), @@ -19,7 +19,7 @@ WHERE (((cdouble > ctinyint) OR ((cint > cbigint) OR (cboolean1 < 0)))) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT VAR_POP(ctinyint), (VAR_POP(ctinyint) / -26.28), SUM(cfloat), @@ -58,7 +58,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 
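The retained WARNING about comparing a bigint and a double is worth a note: a 64-bit BIGINT carries more integer precision than the 53-bit mantissa of a DOUBLE, so the implicit cast before the comparison can silently merge distinct large values. A hedged sketch of the pattern and one assumed workaround:

    -- Implicit BIGINT-to-DOUBLE comparison; longs above 2^53 may collide.
    SELECT COUNT(*) FROM alltypesparquet WHERE cbigint = cdouble;

    -- Routing both sides through DECIMAL keeps the integer side exact
    -- (workaround is an assumption, not part of this patch).
    SELECT COUNT(*) FROM alltypesparquet
    WHERE CAST(cbigint AS DECIMAL(19,0)) = CAST(cdouble AS DECIMAL(19,4));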
12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -106,12 +105,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 2, 3, 4, 5, 10, 11] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double, double, double, double] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true diff --git ql/src/test/results/clientpositive/parquet_vectorization_10.q.out ql/src/test/results/clientpositive/parquet_vectorization_10.q.out index d174a0070d..c6f2e23113 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_10.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_10.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cdouble, ctimestamp1, ctinyint, @@ -22,7 +22,7 @@ WHERE (((cstring2 <= '10') AND ((csmallint = 9763215.5639) OR (cstring1 LIKE '%a')))) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cdouble, ctimestamp1, ctinyint, @@ -64,7 +64,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -101,12 +100,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 3, 5, 6, 7, 8, 10] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double, decimal(6,2), decimal(11,4), double, double, double, double, double, bigint, bigint, bigint, double, double, double] Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/parquet_vectorization_11.q.out ql/src/test/results/clientpositive/parquet_vectorization_11.q.out index 5048ad7560..cd60179795 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_11.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_11.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cstring1, cboolean1, cdouble, @@ -13,7 +13,7 @@ WHERE ((cstring2 = cstring1) OR ((ctimestamp1 IS NULL) AND (cstring1 LIKE '%a'))) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cstring1, cboolean1, cdouble, @@ -46,7 +46,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 
5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -83,12 +82,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [1, 5, 6, 7, 8, 10] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [bigint, double, double, double, double] Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/parquet_vectorization_12.q.out ql/src/test/results/clientpositive/parquet_vectorization_12.q.out index 83ca333522..cbf7c25e46 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_12.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_12.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cbigint, cboolean1, cstring1, @@ -30,7 +30,7 @@ WHERE (((ctimestamp1 IS NULL) GROUP BY cbigint, cboolean1, cstring1, ctimestamp1, cdouble ORDER BY ctimestamp1, cdouble, cbigint, cstring1 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cbigint, cboolean1, cstring1, @@ -81,7 +81,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -133,12 +132,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 3, 5, 6, 8, 10, 11] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double, double, double, double] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -167,7 +160,6 @@ STAGE PLANS: TableScan TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:_col0:bigint, 1:_col1:boolean, 2:_col2:string, 3:_col3:double, 4:_col4:double, 5:_col5:bigint, 6:_col6:bigint, 7:_col7:bigint, 8:_col8:double, 9:_col9:double, 10:_col10:double, 11:_col11:double, 12:_col12:double, 13:_col13:decimal(22,2), 14:_col14:bigint, 15:_col15:double, 16:_col17:double, 17:_col18:double, 18:_col19:double] Reduce Output Operator key expressions: _col3 (type: double), _col0 (type: bigint), _col2 (type: string) sort order: +++ @@ -188,12 +180,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 19 - includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18] - dataColumns: _col0:bigint, _col1:boolean, _col2:string, _col3:double, _col4:double, _col5:bigint, _col6:bigint, 
_col7:bigint, _col8:double, _col9:double, _col10:double, _col11:double, _col12:double, _col13:decimal(22,2), _col14:bigint, _col15:double, _col17:double, _col18:double, _col19:double - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true diff --git ql/src/test/results/clientpositive/parquet_vectorization_13.q.out ql/src/test/results/clientpositive/parquet_vectorization_13.q.out index 6459e29e81..e60548cb77 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_13.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_13.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, ctinyint, ctimestamp1, @@ -31,7 +31,7 @@ GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16 LIMIT 40 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, ctinyint, ctimestamp1, @@ -83,7 +83,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -135,12 +134,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 4, 5, 6, 8, 9, 10] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double, decimal(11,4), double, double, double, double] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -169,7 +162,6 @@ STAGE PLANS: TableScan TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:_col0:boolean, 1:_col1:tinyint, 2:_col2:timestamp, 3:_col3:float, 4:_col4:string, 5:_col5:tinyint, 6:_col6:tinyint, 7:_col7:tinyint, 8:_col8:double, 9:_col9:double, 10:_col10:double, 11:_col11:float, 12:_col12:double, 13:_col13:double, 14:_col14:double, 15:_col15:decimal(7,3), 16:_col16:double, 17:_col17:double, 18:_col18:float, 19:_col19:double, 20:_col20:tinyint] Reduce Output Operator key expressions: _col0 (type: boolean), _col1 (type: tinyint), _col2 (type: timestamp), _col3 (type: float), _col4 (type: string), _col5 (type: tinyint), _col6 (type: tinyint), _col7 (type: tinyint), _col8 (type: double), _col9 (type: double), _col10 (type: double), _col11 (type: float), _col12 (type: double), _col13 (type: double), _col14 (type: double), _col15 (type: decimal(7,3)), _col16 (type: double), _col17 (type: double), _col18 (type: float), _col19 (type: double), _col20 (type: tinyint) sort order: +++++++++++++++++++++ @@ -190,12 +182,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 21 - includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 
10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20] - dataColumns: _col0:boolean, _col1:tinyint, _col2:timestamp, _col3:float, _col4:string, _col5:tinyint, _col6:tinyint, _col7:tinyint, _col8:double, _col9:double, _col10:double, _col11:float, _col12:double, _col13:double, _col14:double, _col15:decimal(7,3), _col16:double, _col17:double, _col18:float, _col19:double, _col20:tinyint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -290,46 +276,46 @@ LIMIT 40 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### -NULL -55 1969-12-31 16:00:11.38 -55.0 NULL 55 -55 0 -55.0 -0.0 55.0 -4375.415 0.0 55.0 0.0 -10.175 -55.0 0.47781818181818186 -55.0 0.0 -55 -NULL -55 1969-12-31 16:00:11.751 -55.0 NULL 55 -55 0 -55.0 -0.0 55.0 -4375.415 0.0 55.0 0.0 -10.175 -55.0 0.47781818181818186 -55.0 0.0 -55 -NULL -56 1969-12-31 16:00:13.602 -56.0 NULL 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 -NULL -56 1969-12-31 16:00:13.958 -56.0 NULL 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 -NULL -56 1969-12-31 16:00:15.038 -56.0 NULL 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 -NULL -57 1969-12-31 16:00:11.451 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 -NULL -57 1969-12-31 16:00:11.883 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 -NULL -57 1969-12-31 16:00:12.626 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 -NULL -57 1969-12-31 16:00:13.578 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 -NULL -57 1969-12-31 16:00:15.39 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 -NULL -58 1969-12-31 16:00:12.065 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 -NULL -58 1969-12-31 16:00:12.683 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 -NULL -58 1969-12-31 16:00:12.948 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 -NULL -58 1969-12-31 16:00:14.066 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 -NULL -58 1969-12-31 16:00:15.658 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 -NULL -59 1969-12-31 16:00:12.008 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 -NULL -59 1969-12-31 16:00:13.15 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 -NULL -59 1969-12-31 16:00:13.625 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 -NULL -59 1969-12-31 16:00:15.296 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 -NULL -59 1969-12-31 16:00:15.861 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 -NULL -60 1969-12-31 16:00:11.504 -60.0 NULL 60 -60 0 -60.0 -0.0 60.0 -4773.18 
0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 -NULL -60 1969-12-31 16:00:11.641 -60.0 NULL 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 -NULL -60 1969-12-31 16:00:11.996 -60.0 NULL 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 -NULL -60 1969-12-31 16:00:12.779 -60.0 NULL 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 -NULL -61 1969-12-31 16:00:11.842 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:12.454 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:14.192 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:16.558 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -62 1969-12-31 16:00:12.388 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:12.591 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.154 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.247 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.517 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.965 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -63 1969-12-31 16:00:11.946 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:12.188 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:15.436 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -64 1969-12-31 16:00:11.912 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:12.339 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:13.274 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -55 1969-12-31 16:00:12.297 -55.0 1cGVWH7n1QU 55 -55 0 -55.0 -0.0 55.0 -4375.415 0.0 55.0 0.0 -10.175 -55.0 0.47781818181818186 -55.0 0.0 -55 +true -55 1969-12-31 16:00:13.15 -55.0 1cGVWH7n1QU 55 -55 0 -55.0 -0.0 55.0 -4375.415 0.0 55.0 0.0 -10.175 -55.0 0.47781818181818186 -55.0 0.0 -55 +true -56 1969-12-31 16:00:11.242 -56.0 cvLH6Eat2yFsyy7p 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 +true -56 1969-12-31 16:00:13.534 -56.0 1cGVWH7n1QU 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 +true -56 1969-12-31 16:00:14.038 -56.0 1cGVWH7n1QU 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 +true -56 1969-12-31 16:00:14.689 -56.0 cvLH6Eat2yFsyy7p 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 +true -56 
1969-12-31 16:00:16.37 -56.0 cvLH6Eat2yFsyy7p 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 +true -57 1969-12-31 16:00:11.534 -57.0 cvLH6Eat2yFsyy7p 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 +true -57 1969-12-31 16:00:13.365 -57.0 1cGVWH7n1QU 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 +true -57 1969-12-31 16:00:14.225 -57.0 821UdmGbkEf4j 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 +true -58 1969-12-31 16:00:12.918 -58.0 cvLH6Eat2yFsyy7p 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 +true -58 1969-12-31 16:00:13.209 -58.0 cvLH6Eat2yFsyy7p 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 +true -58 1969-12-31 16:00:14.933 -58.0 cvLH6Eat2yFsyy7p 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 +true -59 1969-12-31 16:00:11.065 -59.0 821UdmGbkEf4j 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:11.109 -59.0 1cGVWH7n1QU 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:11.231 -59.0 821UdmGbkEf4j 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:11.758 -59.0 821UdmGbkEf4j 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:12.227 -59.0 cvLH6Eat2yFsyy7p 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:15.242 -59.0 821UdmGbkEf4j 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:15.278 -59.0 cvLH6Eat2yFsyy7p 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:16.069 -59.0 cvLH6Eat2yFsyy7p 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:16.125 -59.0 cvLH6Eat2yFsyy7p 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -60 1969-12-31 16:00:11.849 -60.0 1cGVWH7n1QU 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -60 1969-12-31 16:00:12.223 -60.0 1cGVWH7n1QU 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -60 1969-12-31 16:00:12.291 -60.0 821UdmGbkEf4j 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -60 1969-12-31 16:00:13.567 -60.0 821UdmGbkEf4j 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -60 1969-12-31 16:00:15.188 -60.0 cvLH6Eat2yFsyy7p 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -60 1969-12-31 16:00:16.165 -60.0 cvLH6Eat2yFsyy7p 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -61 1969-12-31 16:00:12.045 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:12.75 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:15.325 
-61.0 821UdmGbkEf4j 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:15.694 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -62 1969-12-31 16:00:13.677 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:14.872 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:15.153 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -63 1969-12-31 16:00:13.752 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:14.899 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:15.827 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -64 1969-12-31 16:00:11.952 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:12.857 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, ctinyint, @@ -608,43 +594,43 @@ LIMIT 40 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### -NULL -61 1969-12-31 16:00:00.142 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:02.698 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:03.049 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:04.165 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:04.977 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -62 1969-12-31 16:00:00.037 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:01.22 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:01.515 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:01.734 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:02.373 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:03.85 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:08.198 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:09.025 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:09.889 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 
-4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:10.069 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:10.225 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:10.485 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:12.388 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:12.591 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.154 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.247 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.517 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.965 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -63 1969-12-31 16:00:01.843 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:03.552 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:06.852 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:07.375 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:10.205 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:11.946 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:12.188 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:15.436 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -64 1969-12-31 16:00:00.199 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:00.29 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:01.785 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:03.944 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:05.997 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:10.858 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:11.912 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:12.339 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:13.274 -64.0 NULL 64 -64 0 
-64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -61 1969-12-31 16:00:00.554 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:02.339 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:02.497 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:03.742 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:07.538 -61.0 821UdmGbkEf4j 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:09.809 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:10.713 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:12.045 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:12.75 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -62 1969-12-31 16:00:00.337 -62.0 1cGVWH7n1QU 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:00.659 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:00.684 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:01.419 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:02.123 -62.0 1cGVWH7n1QU 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:02.922 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:04.978 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:07.756 -62.0 1cGVWH7n1QU 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:07.847 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:07.903 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:13.677 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:14.872 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:15.153 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -63 1969-12-31 16:00:05.654 -63.0 821UdmGbkEf4j 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:07.623 -63.0 
cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:09.14 -63.0 821UdmGbkEf4j 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:13.752 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:14.899 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:15.827 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -64 1969-12-31 15:59:58.959 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:00.013 -64.0 1cGVWH7n1QU 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:00.172 -64.0 1cGVWH7n1QU 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:00.631 -64.0 1cGVWH7n1QU 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:01.305 -64.0 cvLH6Eat2yFsyy7p 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:01.79 -64.0 1cGVWH7n1QU 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:02.496 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:03.088 -64.0 cvLH6Eat2yFsyy7p 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:04.662 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:10.273 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:11.952 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:12.857 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 diff --git ql/src/test/results/clientpositive/parquet_vectorization_14.q.out ql/src/test/results/clientpositive/parquet_vectorization_14.q.out index 1eab96280b..a7fdfdaa85 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_14.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_14.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT ctimestamp1, cfloat, cstring1, @@ -31,7 +31,7 @@ WHERE (((ctinyint <= cbigint) GROUP BY ctimestamp1, cfloat, cstring1, cboolean1, cdouble ORDER BY cstring1, cfloat, cdouble, ctimestamp1 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT ctimestamp1, cfloat, cstring1, @@ -83,7 +83,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 
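The wholesale row flips above (rows with a NULL cboolean1 and NULL cstring1 replaced by true rows with concrete strings such as 1cGVWH7n1QU) are also a nulls-ordering effect, not a computation change: the test orders by cboolean1 first and keeps only 40 rows, so once NULL keys sort last they fall outside the LIMIT window and a different slice of the data becomes the expected output. A minimal sketch of the sensitivity:

    -- Which 40 rows survive depends entirely on where NULL
    -- cboolean1 values sort relative to true/false.
    SELECT cboolean1, ctinyint
    FROM alltypesparquet
    ORDER BY cboolean1, ctinyint
    LIMIT 40;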
10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -135,12 +134,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 2, 3, 4, 5, 6, 8, 9, 10] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double, double, double, double] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -169,7 +162,6 @@ STAGE PLANS: TableScan TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:_col0:timestamp, 1:_col1:float, 2:_col2:string, 3:_col3:boolean, 4:_col4:double, 5:_col5:double, 6:_col6:double, 7:_col7:double, 8:_col8:float, 9:_col9:float, 10:_col10:float, 11:_col11:float, 12:_col12:double, 13:_col13:double, 14:_col14:bigint, 15:_col15:double, 16:_col16:double, 17:_col17:double, 18:_col18:double, 19:_col19:double, 20:_col20:double, 21:_col21:double] Reduce Output Operator key expressions: _col2 (type: string), _col1 (type: float), _col4 (type: double), _col0 (type: timestamp) sort order: ++++ @@ -190,12 +182,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 22 - includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21] - dataColumns: _col0:timestamp, _col1:float, _col2:string, _col3:boolean, _col4:double, _col5:double, _col6:double, _col7:double, _col8:float, _col9:float, _col10:float, _col11:float, _col12:double, _col13:double, _col14:bigint, _col15:double, _col16:double, _col17:double, _col18:double, _col19:double, _col20:double, _col21:double - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true diff --git ql/src/test/results/clientpositive/parquet_vectorization_15.q.out ql/src/test/results/clientpositive/parquet_vectorization_15.q.out index 2d306cf6c1..6974ee8906 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_15.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_15.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cfloat, cboolean1, cdouble, @@ -29,7 +29,7 @@ WHERE (((cstring2 LIKE '%ss%') GROUP BY cfloat, cboolean1, cdouble, cstring1, ctinyint, cint, ctimestamp1 ORDER BY cfloat, cboolean1, cdouble, cstring1, ctinyint, cint, ctimestamp1 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cfloat, cboolean1, cdouble, @@ -79,7 +79,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -131,12 +130,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - 
dataColumnCount: 12 - includeColumns: [0, 1, 2, 4, 5, 6, 7, 8, 10] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double, double, double, double, double, double, double] Reduce Vectorization: enabled: false enableConditionsNotMet: hive.vectorized.execution.reduce.enabled IS false, hive.execution.engine mr IN [tez, spark] IS false @@ -164,7 +157,6 @@ STAGE PLANS: TableScan TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:_col0:float, 1:_col1:boolean, 2:_col2:double, 3:_col3:string, 4:_col4:tinyint, 5:_col5:int, 6:_col6:timestamp, 7:_col7:double, 8:_col8:decimal(13,2), 9:_col9:double, 10:_col10:double, 11:_col11:float, 12:_col12:double, 13:_col13:double, 14:_col14:double, 15:_col15:tinyint, 16:_col16:double, 17:_col17:float, 18:_col18:int, 19:_col19:decimal(13,2), 20:_col20:double] Reduce Output Operator key expressions: _col0 (type: float), _col1 (type: boolean), _col2 (type: double), _col3 (type: string), _col4 (type: tinyint), _col5 (type: int), _col6 (type: timestamp) sort order: +++++++ @@ -185,12 +177,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 21 - includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20] - dataColumns: _col0:float, _col1:boolean, _col2:double, _col3:string, _col4:tinyint, _col5:int, _col6:timestamp, _col7:double, _col8:decimal(13,2), _col9:double, _col10:double, _col11:float, _col12:double, _col13:double, _col14:double, _col15:tinyint, _col16:double, _col17:float, _col18:int, _col19:decimal(13,2), _col20:double - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Vectorization: enabled: false enableConditionsNotMet: hive.vectorized.execution.reduce.enabled IS false, hive.execution.engine mr IN [tez, spark] IS false diff --git ql/src/test/results/clientpositive/parquet_vectorization_16.q.out ql/src/test/results/clientpositive/parquet_vectorization_16.q.out index fa317902b4..fbe86a0b27 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_16.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_16.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cstring1, cdouble, ctimestamp1, @@ -18,7 +18,7 @@ WHERE ((cstring2 LIKE '%b%') OR (cstring1 < 'a'))) GROUP BY cstring1, cdouble, ctimestamp1 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cstring1, cdouble, ctimestamp1, @@ -56,7 +56,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -108,12 +107,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [5, 6, 7, 8] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, 
cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true diff --git ql/src/test/results/clientpositive/parquet_vectorization_17.q.out ql/src/test/results/clientpositive/parquet_vectorization_17.q.out index 9fa2d72ff8..eed7d9febd 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_17.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_17.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cfloat, cstring1, cint, @@ -22,7 +22,7 @@ WHERE (((cbigint > -23) OR (cfloat = cdouble)))) ORDER BY cbigint, cfloat PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cfloat, cstring1, cint, @@ -64,7 +64,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -101,12 +100,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 2, 3, 4, 5, 6, 8] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(13,3), double, double, bigint, double, double, double, double, decimal(19,0), decimal(11,4), double] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true diff --git ql/src/test/results/clientpositive/parquet_vectorization_2.q.out ql/src/test/results/clientpositive/parquet_vectorization_2.q.out index a5af87ccdc..92fc064df9 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_2.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_2.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT AVG(csmallint), (AVG(csmallint) % -563), (AVG(csmallint) + 762), @@ -21,7 +21,7 @@ WHERE (((ctimestamp1 < ctimestamp2) AND ((-10669 != ctimestamp2) OR (359 > cint)))) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT AVG(csmallint), (AVG(csmallint) % -563), (AVG(csmallint) + 762), @@ -62,7 +62,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -110,12 +109,6 @@ STAGE PLANS: allNative: false 
usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 2, 3, 4, 5, 7, 8, 9] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double, double, double, double] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true diff --git ql/src/test/results/clientpositive/parquet_vectorization_3.q.out ql/src/test/results/clientpositive/parquet_vectorization_3.q.out index ffdb68e62b..fe3db8db53 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_3.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_3.q.out @@ -1,5 +1,5 @@ WARNING: Comparing a bigint and a double may result in a loss of precision. -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT STDDEV_SAMP(csmallint), (STDDEV_SAMP(csmallint) - 10.175), STDDEV_POP(ctinyint), @@ -24,7 +24,7 @@ WHERE (((cint <= cfloat) AND ((79.553 <= csmallint) AND (ctimestamp1 > ctimestamp2)))) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT STDDEV_SAMP(csmallint), (STDDEV_SAMP(csmallint) - 10.175), STDDEV_POP(ctinyint), @@ -67,7 +67,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -115,12 +114,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 2, 3, 4, 5, 8, 9] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double, decimal(22,3), decimal(8,3), double, double, double, double, double, double, double, double] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true diff --git ql/src/test/results/clientpositive/parquet_vectorization_4.q.out ql/src/test/results/clientpositive/parquet_vectorization_4.q.out index 998fdb3576..a8e58be484 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_4.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_4.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(cint), (SUM(cint) * -563), (-3728 + SUM(cint)), @@ -21,7 +21,7 @@ WHERE (((csmallint >= cint) AND ((ctinyint != cbigint) OR (-3728 >= cdouble)))) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(cint), (SUM(cint) * -563), (-3728 + SUM(cint)), @@ -62,7 +62,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - 
vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -110,12 +109,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 2, 3, 5] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true diff --git ql/src/test/results/clientpositive/parquet_vectorization_5.q.out ql/src/test/results/clientpositive/parquet_vectorization_5.q.out index 4e490ad871..7dee8cae39 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_5.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_5.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT MAX(csmallint), (MAX(csmallint) * -75), COUNT(*), @@ -18,7 +18,7 @@ WHERE (((cboolean2 IS NOT NULL) AND ((ctimestamp2 IS NOT NULL) AND (cstring2 LIKE 'a')))) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT MAX(csmallint), (MAX(csmallint) * -75), COUNT(*), @@ -56,7 +56,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -103,12 +102,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 2, 5, 6, 7, 9, 11] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true diff --git ql/src/test/results/clientpositive/parquet_vectorization_6.q.out ql/src/test/results/clientpositive/parquet_vectorization_6.q.out index fa649b2378..85b075666f 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_6.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_6.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, cfloat, cstring1, @@ -19,7 +19,7 @@ WHERE ((ctinyint != 0) AND ((cstring2 LIKE '%a') OR (cfloat <= -257)))))) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, cfloat, cstring1, @@ -58,7 +58,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data 
size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -95,12 +94,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 10, 11] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [bigint, bigint, double, double, double, bigint, double, bigint, bigint, bigint] Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/parquet_vectorization_7.q.out ql/src/test/results/clientpositive/parquet_vectorization_7.q.out index 0630e8b08c..93fcfb7937 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_7.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_7.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, cbigint, csmallint, @@ -25,7 +25,7 @@ WHERE ((ctinyint != 0) ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9 LIMIT 25 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, cbigint, csmallint, @@ -70,7 +70,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -107,12 +106,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 2, 3, 5, 6, 7, 8, 9, 10] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double, bigint, bigint, bigint, bigint, bigint, bigint, bigint, bigint, bigint, bigint] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -195,31 +188,31 @@ LIMIT 25 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### -NULL -2118149242 -7196 56 1969-12-31 15:59:50.462 NULL -4236298484 0 7196 -56 -39 -15242201945432 NULL -56 0 -NULL -2121399625 -7196 27 1969-12-31 15:59:50.046 NULL -4242799250 0 7196 -27 -10 -15265591701500 NULL -27 0 -NULL -2124802690 -7196 -6 1969-12-31 15:59:57.92 NULL -4249605380 0 7196 6 23 -15290080157240 NULL 6 0 -NULL -2128720310 -7196 -52 1969-12-31 15:59:45.978 NULL -4257440620 0 7196 52 69 -15318271350760 NULL 52 0 -NULL 
-2132232110 -200 60 1969-12-31 15:59:47.019 NULL -4264464220 -200 200 -60 -43 -426446422000 NULL -60 0 -NULL -2132536965 -7196 9 1969-12-31 15:59:46 NULL -4265073930 0 7196 -9 8 -15345736000140 NULL -9 0 -NULL -2135141157 -7196 50 1969-12-31 15:59:50.192 NULL -4270282314 0 7196 -50 -33 -15364475765772 NULL -50 0 -NULL -2137537679 -7196 -25 1969-12-31 15:59:50.136 NULL -4275075358 0 7196 25 42 -15381721138084 NULL 25 0 -NULL -2145481991 -7196 56 1969-12-31 15:59:55.667 NULL -4290963982 0 7196 -56 -39 -15438888407236 NULL -56 0 -NULL NULL -200 -36 1969-12-31 15:59:57.241 NULL NULL -200 200 36 53 NULL NULL 36 0 -NULL NULL -200 -43 1969-12-31 15:59:53.783 NULL NULL -200 200 43 60 NULL NULL 43 0 -NULL NULL -200 -58 1969-12-31 15:59:51.115 NULL NULL -200 200 58 75 NULL NULL 58 0 -NULL NULL -200 22 1969-12-31 15:59:50.109 NULL NULL -200 200 -22 -5 NULL NULL -22 0 -NULL NULL -200 3 1969-12-31 15:59:50.489 NULL NULL -200 200 -3 14 NULL NULL -3 0 -NULL NULL -200 43 1969-12-31 15:59:57.003 NULL NULL -200 200 -43 -26 NULL NULL -43 0 -NULL NULL -200 53 1969-12-31 15:59:49.46 NULL NULL -200 200 -53 -36 NULL NULL -53 0 -NULL NULL -200 9 1969-12-31 15:59:44.108 NULL NULL -200 200 -9 8 NULL NULL -9 0 -NULL NULL -7196 -38 1969-12-31 15:59:53.503 NULL NULL 0 7196 38 55 NULL NULL 38 0 -NULL NULL -7196 -49 1969-12-31 15:59:51.009 NULL NULL 0 7196 49 66 NULL NULL 49 0 -NULL NULL -7196 -49 1969-12-31 15:59:52.052 NULL NULL 0 7196 49 66 NULL NULL 49 0 -NULL NULL -7196 -50 1969-12-31 15:59:52.424 NULL NULL 0 7196 50 67 NULL NULL 50 0 -NULL NULL -7196 -61 1969-12-31 15:59:44.823 NULL NULL 0 7196 61 78 NULL NULL 61 0 -NULL NULL -7196 1 1969-12-31 15:59:48.361 NULL NULL 0 7196 -1 16 NULL NULL -1 0 -NULL NULL -7196 14 1969-12-31 15:59:50.291 NULL NULL 0 7196 -14 3 NULL NULL -14 0 -NULL NULL -7196 22 1969-12-31 15:59:52.699 NULL NULL 0 7196 -22 -5 NULL NULL -22 0 +true NULL -15892 29 1969-12-31 15:59:57.937 821UdmGbkEf4j NULL -215 15892 -29 -12 NULL 171 -29 0 +true NULL -15899 50 1969-12-31 15:59:46.926 821UdmGbkEf4j NULL -222 15899 -50 -33 NULL 10210 -50 0 +true NULL -15903 -2 1969-12-31 15:59:46.371 cvLH6Eat2yFsyy7p NULL -226 15903 2 19 NULL 14465 2 0 +true NULL -15920 -64 1969-12-31 15:59:51.859 cvLH6Eat2yFsyy7p NULL -243 15920 64 81 NULL 6687 64 0 +true NULL -15922 -17 1969-12-31 15:59:46.164 821UdmGbkEf4j NULL -245 15922 17 34 NULL 10851 17 0 +true NULL -15923 49 1969-12-31 15:59:47.323 cvLH6Eat2yFsyy7p NULL -246 15923 -49 -32 NULL 2628 -49 0 +true NULL -15935 -6 1969-12-31 15:59:45.859 1cGVWH7n1QU NULL -1 15935 6 23 NULL 12046 6 0 +true NULL -15948 31 1969-12-31 15:59:47.577 821UdmGbkEf4j NULL -14 15948 -31 -14 NULL 7799 -31 0 +true NULL -15948 6 1969-12-31 15:59:49.269 1cGVWH7n1QU NULL -14 15948 -6 11 NULL 12436 -6 0 +true NULL -15980 -6 1969-12-31 15:59:54.84 1cGVWH7n1QU NULL -46 15980 6 23 NULL 14836 6 0 +true NULL -15999 4 1969-12-31 15:59:46.491 1cGVWH7n1QU NULL -65 15999 -4 13 NULL 1231 -4 0 +true NULL -16017 -21 1969-12-31 15:59:44.02 821UdmGbkEf4j NULL -83 16017 21 38 NULL 2282 21 0 +true NULL -16025 -42 1969-12-31 15:59:54.534 cvLH6Eat2yFsyy7p NULL -91 16025 42 59 NULL 14242 42 0 +true NULL -16036 -15 1969-12-31 15:59:58.681 1cGVWH7n1QU NULL -102 16036 15 32 NULL 7928 15 0 +true NULL -16059 -35 1969-12-31 15:59:53.038 821UdmGbkEf4j NULL -125 16059 35 52 NULL 12437 35 0 +true NULL -16076 59 1969-12-31 15:59:55.023 821UdmGbkEf4j NULL -142 16076 -59 -42 NULL 7907 -59 0 +true NULL -16122 50 1969-12-31 15:59:51.608 1cGVWH7n1QU NULL -188 16122 -50 -33 NULL 1828 -50 0 +true NULL -16123 -20 1969-12-31 
15:59:51.177 1cGVWH7n1QU NULL -189 16123 20 37 NULL 2217 20 0 +true NULL -16153 35 1969-12-31 15:59:52.036 1cGVWH7n1QU NULL -219 16153 -35 -18 NULL 14817 -35 0 +true NULL -16169 5 1969-12-31 15:59:45.059 1cGVWH7n1QU NULL -235 16169 -5 12 NULL 6104 -5 0 +true NULL -16207 -4 1969-12-31 15:59:45.956 cvLH6Eat2yFsyy7p NULL -16 16207 4 21 NULL 8290 4 0 +true NULL -16221 -12 1969-12-31 15:59:45.877 1cGVWH7n1QU NULL -30 16221 12 29 NULL 1378 12 0 +true NULL -16227 2 1969-12-31 15:59:44.065 821UdmGbkEf4j NULL -36 16227 -2 15 NULL 9761 -2 0 +true NULL -16305 3 1969-12-31 15:59:43.878 1cGVWH7n1QU NULL -114 16305 -3 14 NULL 8491 -3 0 +true NULL -16339 15 1969-12-31 15:59:53.966 821UdmGbkEf4j NULL -148 16339 -15 2 NULL 12588 -15 0 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, cbigint, diff --git ql/src/test/results/clientpositive/parquet_vectorization_8.q.out ql/src/test/results/clientpositive/parquet_vectorization_8.q.out index b4481bfa40..c089aab125 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_8.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_8.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT ctimestamp1, cdouble, cboolean1, @@ -23,7 +23,7 @@ WHERE (((cstring2 IS NOT NULL) ORDER BY ctimestamp1, cdouble, cboolean1, cstring1, cfloat, c1, c2, c3, c4, c5, c6, c7, c8, c9 LIMIT 20 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT ctimestamp1, cdouble, cboolean1, @@ -66,7 +66,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -103,12 +102,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [2, 3, 4, 5, 6, 7, 8, 9, 10] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double, double, double, double, double, double, double, double, double, double, double] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true diff --git ql/src/test/results/clientpositive/parquet_vectorization_9.q.out ql/src/test/results/clientpositive/parquet_vectorization_9.q.out index fa317902b4..fbe86a0b27 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_9.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_9.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cstring1, cdouble, ctimestamp1, @@ -18,7 +18,7 @@ WHERE ((cstring2 LIKE '%b%') OR (cstring1 < 'a'))) GROUP BY cstring1, cdouble, ctimestamp1 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cstring1, cdouble, ctimestamp1, @@ -56,7 +56,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data 
size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -108,12 +107,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [5, 6, 7, 8] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true diff --git ql/src/test/results/clientpositive/parquet_vectorization_div0.q.out ql/src/test/results/clientpositive/parquet_vectorization_div0.q.out index 1baa650dcb..c3b85fbbcb 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_div0.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_div0.q.out @@ -270,8 +270,8 @@ from alltypesparquet where cbigint > 0 and cbigint < 100000000 order by s1, s2 l POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### --985319 NULL -0.000001217879691754650 -985319 2.0297994862577501E-4 -0.000001217879691754650 +-985319 NULL -0.000001217879691754650 -63925 0.11256941728588189 -0.000018771998435666797 0 NULL NULL 0 NULL NULL @@ -478,8 +478,8 @@ POSTHOOK: Input: default@alltypesparquet -273.0 6028764.868131869 1.0 6028764.868131869 -0.01098901098901099 -0.004395604395604396 -257.0 6404096.53307393 1.0 6404096.53307393 -0.011673151750972763 -0.004669260700389105 -250.0 6583411.236 1.0 6583411.236 -0.012 -0.0048 --247.0 NULL 1.0 NULL -0.012145748987854251 -0.004858299595141701 -247.0 -7546669.174089069 1.0 -7546669.174089069 -0.012145748987854251 -0.004858299595141701 +-247.0 NULL 1.0 NULL -0.012145748987854251 -0.004858299595141701 -246.0 NULL 1.0 NULL -0.012195121951219513 -0.004878048780487805 -237.0 NULL 1.0 NULL -0.012658227848101266 -0.005063291139240506 -236.0 NULL 1.0 NULL -0.012711864406779662 -0.005084745762711864 @@ -507,18 +507,18 @@ POSTHOOK: Input: default@alltypesparquet -132.0 NULL 1.0 NULL -0.022727272727272728 -0.00909090909090909 -129.0 1.2758548906976745E7 1.0 1.2758548906976745E7 -0.023255813953488372 -0.009302325581395349 -128.0 NULL 1.0 NULL -0.0234375 -0.009375 --126.0 NULL 1.0 NULL -0.023809523809523808 -0.009523809523809523 -126.0 -1.4793867349206349E7 1.0 -1.4793867349206349E7 -0.023809523809523808 -0.009523809523809523 +-126.0 NULL 1.0 NULL -0.023809523809523808 -0.009523809523809523 -116.0 NULL 1.0 NULL -0.02586206896551724 -0.010344827586206896 --113.0 NULL 1.0 NULL -0.02654867256637168 -0.010619469026548672 -113.0 -1.6495816690265486E7 1.0 -1.6495816690265486E7 -0.02654867256637168 -0.010619469026548672 +-113.0 NULL 1.0 NULL -0.02654867256637168 -0.010619469026548672 -96.0 NULL 1.0 NULL -0.03125 -0.012499999999999999 -94.0 -1.9830077510638297E7 1.0 -1.9830077510638297E7 -0.031914893617021274 -0.01276595744680851 -93.0 NULL 1.0 NULL -0.03225806451612903 -0.012903225806451613 -77.0 2.4513789038961038E7 1.0 2.4513789038961038E7 -0.03896103896103896 -0.015584415584415584 
-69.0 2.735596747826087E7 1.0 2.735596747826087E7 -0.043478260869565216 -0.017391304347826087 --62.0 NULL 1.0 NULL -0.04838709677419355 -0.01935483870967742 -62.0 3.0444544451612905E7 1.0 3.0444544451612905E7 -0.04838709677419355 -0.01935483870967742 +-62.0 NULL 1.0 NULL -0.04838709677419355 -0.01935483870967742 -60.0 NULL 1.0 NULL -0.05 -0.02 -57.0 -3.27022330877193E7 1.0 -3.27022330877193E7 -0.05263157894736842 -0.021052631578947368 -49.0 3.35888328367347E7 1.0 3.35888328367347E7 -0.061224489795918366 -0.024489795918367346 diff --git ql/src/test/results/clientpositive/parquet_vectorization_limit.q.out ql/src/test/results/clientpositive/parquet_vectorization_limit.q.out index dc9f66361c..e8fe55341b 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_limit.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_limit.q.out @@ -69,10 +69,10 @@ POSTHOOK: Input: default@alltypesparquet -1887561756 10361.0 -1887561756 1839.0 -1887561756 9531.0 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain VECTORIZATION EXPRESSION select ctinyint,cdouble,csmallint from alltypesparquet where ctinyint is not null order by ctinyint,cdouble limit 20 PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain VECTORIZATION EXPRESSION select ctinyint,cdouble,csmallint from alltypesparquet where ctinyint is not null order by ctinyint,cdouble limit 20 POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -93,7 +93,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -130,12 +129,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 5] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -190,10 +183,10 @@ POSTHOOK: Input: default@alltypesparquet -64 -7196.0 -7196 -64 -8080.0 -8080 -64 -9842.0 -9842 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain VECTORIZATION EXPRESSION select ctinyint,avg(cdouble + 1) from alltypesparquet group by ctinyint order by ctinyint limit 20 PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain VECTORIZATION EXPRESSION select ctinyint,avg(cdouble + 1) from alltypesparquet group by ctinyint order by ctinyint limit 20 POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -213,7 +206,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 
12:ROW__ID:struct] Select Operator expressions: ctinyint (type: tinyint), (cdouble + 1.0D) (type: double) outputColumnNames: _col0, _col1 @@ -259,12 +251,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 5] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -305,6 +291,7 @@ POSTHOOK: query: select ctinyint,avg(cdouble + 1) from alltypesparquet group by POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### +-45 326.44444444444446 -46 3033.55 -47 -574.6428571428571 -48 1672.909090909091 @@ -324,11 +311,10 @@ POSTHOOK: Input: default@alltypesparquet -62 245.69387755102042 -63 2178.7272727272725 -64 373.52941176470586 -NULL 9370.0945309795 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain VECTORIZATION EXPRESSION select distinct(ctinyint) from alltypesparquet limit 20 PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain VECTORIZATION EXPRESSION select distinct(ctinyint) from alltypesparquet limit 20 POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -348,7 +334,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Select Operator expressions: ctinyint (type: tinyint) outputColumnNames: ctinyint @@ -390,12 +375,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -451,10 +430,10 @@ POSTHOOK: Input: default@alltypesparquet -63 -64 NULL -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain VECTORIZATION EXPRESSION select ctinyint, count(distinct(cdouble)) from alltypesparquet group by ctinyint order by ctinyint limit 20 PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain VECTORIZATION EXPRESSION select ctinyint, count(distinct(cdouble)) from alltypesparquet group by ctinyint order by ctinyint limit 20 POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -530,6 +509,7 @@ POSTHOOK: query: select ctinyint, count(distinct(cdouble)) from alltypesparquet POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### +-45 24 -46 24 -47 22 -48 29 @@ -549,11 +529,10 @@ POSTHOOK: Input: default@alltypesparquet -62 27 -63 19 -64 24 -NULL 2932 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain VECTORIZATION EXPRESSION select 
ctinyint,cdouble from alltypesparquet order by ctinyint limit 0 PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain VECTORIZATION EXPRESSION select ctinyint,cdouble from alltypesparquet order by ctinyint limit 0 POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -578,10 +557,10 @@ POSTHOOK: query: select ctinyint,cdouble from alltypesparquet order by ctinyint POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain VECTORIZATION EXPRESSION select cdouble, sum(ctinyint) as sum from alltypesparquet where ctinyint is not null group by cdouble order by sum, cdouble limit 20 PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain VECTORIZATION EXPRESSION select cdouble, sum(ctinyint) as sum from alltypesparquet where ctinyint is not null group by cdouble order by sum, cdouble limit 20 POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -603,7 +582,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -646,12 +624,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 5] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true @@ -676,7 +648,6 @@ STAGE PLANS: TableScan TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:_col0:double, 1:_col1:bigint] Reduce Output Operator key expressions: _col1 (type: bigint), _col0 (type: double) sort order: ++ @@ -697,12 +668,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - includeColumns: [0, 1] - dataColumns: _col0:double, _col1:bigint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Vectorization: enabled: false enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true diff --git ql/src/test/results/clientpositive/parquet_vectorization_part_project.q.out ql/src/test/results/clientpositive/parquet_vectorization_part_project.q.out index 0786685a3b..c8a94d587c 100644 --- ql/src/test/results/clientpositive/parquet_vectorization_part_project.q.out +++ ql/src/test/results/clientpositive/parquet_vectorization_part_project.q.out @@ -122,13 +122,13 @@ POSTHOOK: Input: default@alltypesparquet_part_n0 POSTHOOK: Input: default@alltypesparquet_part_n0@ds=2011 POSTHOOK: Input: default@alltypesparquet_part_n0@ds=2012 #### A masked pattern was here #### -NULL -NULL --15863.0 --15863.0 --14988.0 --14988.0 --14646.0 --14646.0 --14236.0 --14236.0 +-15990.0 +-15990.0 +-15918.0 +-15918.0 +-15890.0 +-15890.0 +-14305.0 +-14305.0 +-12514.0 +-12514.0 diff --git ql/src/test/results/clientpositive/partition_vs_table_metadata.q.out 
ql/src/test/results/clientpositive/partition_vs_table_metadata.q.out index 6fc8b06547..1b576ee10a 100644 --- ql/src/test/results/clientpositive/partition_vs_table_metadata.q.out +++ ql/src/test/results/clientpositive/partition_vs_table_metadata.q.out @@ -49,1003 +49,1003 @@ POSTHOOK: Input: default@partition_vs_table POSTHOOK: Input: default@partition_vs_table@ds=100 POSTHOOK: Input: default@partition_vs_table@ds=101 #### A masked pattern was here #### -0 val_0 NULL -0 val_0 NULL -0 val_0 NULL 0 val_0 0 0 val_0 0 0 val_0 0 -10 val_10 NULL +0 val_0 NULL +0 val_0 NULL +0 val_0 NULL 10 val_10 10 -100 val_100 NULL -100 val_100 NULL +10 val_10 NULL 100 val_100 100 100 val_100 100 -103 val_103 NULL -103 val_103 NULL +100 val_100 NULL +100 val_100 NULL 103 val_103 103 103 val_103 103 -104 val_104 NULL -104 val_104 NULL +103 val_103 NULL +103 val_103 NULL 104 val_104 104 104 val_104 104 -105 val_105 NULL +104 val_104 NULL +104 val_104 NULL 105 val_105 105 -11 val_11 NULL +105 val_105 NULL 11 val_11 11 -111 val_111 NULL +11 val_11 NULL 111 val_111 111 -113 val_113 NULL -113 val_113 NULL +111 val_111 NULL 113 val_113 113 113 val_113 113 -114 val_114 NULL +113 val_113 NULL +113 val_113 NULL 114 val_114 114 -116 val_116 NULL +114 val_114 NULL 116 val_116 116 -118 val_118 NULL -118 val_118 NULL +116 val_116 NULL 118 val_118 118 118 val_118 118 -119 val_119 NULL -119 val_119 NULL -119 val_119 NULL +118 val_118 NULL +118 val_118 NULL 119 val_119 119 119 val_119 119 119 val_119 119 -12 val_12 NULL -12 val_12 NULL +119 val_119 NULL +119 val_119 NULL +119 val_119 NULL 12 val_12 12 12 val_12 12 -120 val_120 NULL -120 val_120 NULL +12 val_12 NULL +12 val_12 NULL 120 val_120 120 120 val_120 120 -125 val_125 NULL -125 val_125 NULL +120 val_120 NULL +120 val_120 NULL 125 val_125 125 125 val_125 125 -126 val_126 NULL +125 val_125 NULL +125 val_125 NULL 126 val_126 126 -128 val_128 NULL -128 val_128 NULL -128 val_128 NULL +126 val_126 NULL 128 val_128 128 128 val_128 128 128 val_128 128 -129 val_129 NULL -129 val_129 NULL +128 val_128 NULL +128 val_128 NULL +128 val_128 NULL 129 val_129 129 129 val_129 129 -131 val_131 NULL +129 val_129 NULL +129 val_129 NULL 131 val_131 131 -133 val_133 NULL +131 val_131 NULL 133 val_133 133 -134 val_134 NULL -134 val_134 NULL +133 val_133 NULL 134 val_134 134 134 val_134 134 -136 val_136 NULL +134 val_134 NULL +134 val_134 NULL 136 val_136 136 -137 val_137 NULL -137 val_137 NULL +136 val_136 NULL 137 val_137 137 137 val_137 137 -138 val_138 NULL -138 val_138 NULL -138 val_138 NULL -138 val_138 NULL +137 val_137 NULL +137 val_137 NULL 138 val_138 138 138 val_138 138 138 val_138 138 138 val_138 138 -143 val_143 NULL +138 val_138 NULL +138 val_138 NULL +138 val_138 NULL +138 val_138 NULL 143 val_143 143 -145 val_145 NULL +143 val_143 NULL 145 val_145 145 -146 val_146 NULL -146 val_146 NULL +145 val_145 NULL 146 val_146 146 146 val_146 146 -149 val_149 NULL -149 val_149 NULL +146 val_146 NULL +146 val_146 NULL 149 val_149 149 149 val_149 149 -15 val_15 NULL -15 val_15 NULL +149 val_149 NULL +149 val_149 NULL 15 val_15 15 15 val_15 15 -150 val_150 NULL +15 val_15 NULL +15 val_15 NULL 150 val_150 150 -152 val_152 NULL -152 val_152 NULL +150 val_150 NULL 152 val_152 152 152 val_152 152 -153 val_153 NULL +152 val_152 NULL +152 val_152 NULL 153 val_153 153 -155 val_155 NULL +153 val_153 NULL 155 val_155 155 -156 val_156 NULL +155 val_155 NULL 156 val_156 156 -157 val_157 NULL +156 val_156 NULL 157 val_157 157 -158 val_158 NULL +157 val_157 NULL 158 val_158 158 -160 val_160 NULL +158 
val_158 NULL 160 val_160 160 -162 val_162 NULL +160 val_160 NULL 162 val_162 162 -163 val_163 NULL +162 val_162 NULL 163 val_163 163 -164 val_164 NULL -164 val_164 NULL +163 val_163 NULL 164 val_164 164 164 val_164 164 -165 val_165 NULL -165 val_165 NULL +164 val_164 NULL +164 val_164 NULL 165 val_165 165 165 val_165 165 -166 val_166 NULL +165 val_165 NULL +165 val_165 NULL 166 val_166 166 -167 val_167 NULL -167 val_167 NULL -167 val_167 NULL +166 val_166 NULL 167 val_167 167 167 val_167 167 167 val_167 167 -168 val_168 NULL +167 val_167 NULL +167 val_167 NULL +167 val_167 NULL 168 val_168 168 -169 val_169 NULL -169 val_169 NULL -169 val_169 NULL -169 val_169 NULL +168 val_168 NULL 169 val_169 169 169 val_169 169 169 val_169 169 169 val_169 169 -17 val_17 NULL +169 val_169 NULL +169 val_169 NULL +169 val_169 NULL +169 val_169 NULL 17 val_17 17 -170 val_170 NULL +17 val_17 NULL 170 val_170 170 -172 val_172 NULL -172 val_172 NULL +170 val_170 NULL 172 val_172 172 172 val_172 172 -174 val_174 NULL -174 val_174 NULL +172 val_172 NULL +172 val_172 NULL 174 val_174 174 174 val_174 174 -175 val_175 NULL -175 val_175 NULL +174 val_174 NULL +174 val_174 NULL 175 val_175 175 175 val_175 175 -176 val_176 NULL -176 val_176 NULL +175 val_175 NULL +175 val_175 NULL 176 val_176 176 176 val_176 176 -177 val_177 NULL +176 val_176 NULL +176 val_176 NULL 177 val_177 177 -178 val_178 NULL +177 val_177 NULL 178 val_178 178 -179 val_179 NULL -179 val_179 NULL +178 val_178 NULL 179 val_179 179 179 val_179 179 -18 val_18 NULL -18 val_18 NULL +179 val_179 NULL +179 val_179 NULL 18 val_18 18 18 val_18 18 -180 val_180 NULL +18 val_18 NULL +18 val_18 NULL 180 val_180 180 -181 val_181 NULL +180 val_180 NULL 181 val_181 181 -183 val_183 NULL +181 val_181 NULL 183 val_183 183 -186 val_186 NULL +183 val_183 NULL 186 val_186 186 -187 val_187 NULL -187 val_187 NULL -187 val_187 NULL +186 val_186 NULL 187 val_187 187 187 val_187 187 187 val_187 187 -189 val_189 NULL +187 val_187 NULL +187 val_187 NULL +187 val_187 NULL 189 val_189 189 -19 val_19 NULL +189 val_189 NULL 19 val_19 19 -190 val_190 NULL +19 val_19 NULL 190 val_190 190 -191 val_191 NULL -191 val_191 NULL +190 val_190 NULL 191 val_191 191 191 val_191 191 -192 val_192 NULL +191 val_191 NULL +191 val_191 NULL 192 val_192 192 -193 val_193 NULL -193 val_193 NULL -193 val_193 NULL +192 val_192 NULL 193 val_193 193 193 val_193 193 193 val_193 193 -194 val_194 NULL +193 val_193 NULL +193 val_193 NULL +193 val_193 NULL 194 val_194 194 -195 val_195 NULL -195 val_195 NULL +194 val_194 NULL 195 val_195 195 195 val_195 195 -196 val_196 NULL +195 val_195 NULL +195 val_195 NULL 196 val_196 196 -197 val_197 NULL -197 val_197 NULL +196 val_196 NULL 197 val_197 197 197 val_197 197 -199 val_199 NULL -199 val_199 NULL -199 val_199 NULL +197 val_197 NULL +197 val_197 NULL 199 val_199 199 199 val_199 199 199 val_199 199 -2 val_2 NULL +199 val_199 NULL +199 val_199 NULL +199 val_199 NULL 2 val_2 2 -20 val_20 NULL +2 val_2 NULL 20 val_20 20 -200 val_200 NULL -200 val_200 NULL +20 val_20 NULL 200 val_200 200 200 val_200 200 -201 val_201 NULL +200 val_200 NULL +200 val_200 NULL 201 val_201 201 -202 val_202 NULL +201 val_201 NULL 202 val_202 202 -203 val_203 NULL -203 val_203 NULL +202 val_202 NULL 203 val_203 203 203 val_203 203 -205 val_205 NULL -205 val_205 NULL +203 val_203 NULL +203 val_203 NULL 205 val_205 205 205 val_205 205 -207 val_207 NULL -207 val_207 NULL +205 val_205 NULL +205 val_205 NULL 207 val_207 207 207 val_207 207 -208 val_208 NULL -208 val_208 NULL -208 val_208 NULL 
+207 val_207 NULL +207 val_207 NULL 208 val_208 208 208 val_208 208 208 val_208 208 -209 val_209 NULL -209 val_209 NULL +208 val_208 NULL +208 val_208 NULL +208 val_208 NULL 209 val_209 209 209 val_209 209 -213 val_213 NULL -213 val_213 NULL +209 val_209 NULL +209 val_209 NULL 213 val_213 213 213 val_213 213 -214 val_214 NULL +213 val_213 NULL +213 val_213 NULL 214 val_214 214 -216 val_216 NULL -216 val_216 NULL +214 val_214 NULL 216 val_216 216 216 val_216 216 -217 val_217 NULL -217 val_217 NULL +216 val_216 NULL +216 val_216 NULL 217 val_217 217 217 val_217 217 -218 val_218 NULL +217 val_217 NULL +217 val_217 NULL 218 val_218 218 -219 val_219 NULL -219 val_219 NULL +218 val_218 NULL 219 val_219 219 219 val_219 219 -221 val_221 NULL -221 val_221 NULL +219 val_219 NULL +219 val_219 NULL 221 val_221 221 221 val_221 221 -222 val_222 NULL +221 val_221 NULL +221 val_221 NULL 222 val_222 222 -223 val_223 NULL -223 val_223 NULL +222 val_222 NULL 223 val_223 223 223 val_223 223 -224 val_224 NULL -224 val_224 NULL +223 val_223 NULL +223 val_223 NULL 224 val_224 224 224 val_224 224 -226 val_226 NULL +224 val_224 NULL +224 val_224 NULL 226 val_226 226 -228 val_228 NULL +226 val_226 NULL 228 val_228 228 -229 val_229 NULL -229 val_229 NULL +228 val_228 NULL 229 val_229 229 229 val_229 229 -230 val_230 NULL -230 val_230 NULL -230 val_230 NULL -230 val_230 NULL -230 val_230 NULL +229 val_229 NULL +229 val_229 NULL 230 val_230 230 230 val_230 230 230 val_230 230 230 val_230 230 230 val_230 230 -233 val_233 NULL -233 val_233 NULL +230 val_230 NULL +230 val_230 NULL +230 val_230 NULL +230 val_230 NULL +230 val_230 NULL 233 val_233 233 233 val_233 233 -235 val_235 NULL +233 val_233 NULL +233 val_233 NULL 235 val_235 235 -237 val_237 NULL -237 val_237 NULL +235 val_235 NULL 237 val_237 237 237 val_237 237 -238 val_238 NULL -238 val_238 NULL +237 val_237 NULL +237 val_237 NULL 238 val_238 238 238 val_238 238 -239 val_239 NULL -239 val_239 NULL +238 val_238 NULL +238 val_238 NULL 239 val_239 239 239 val_239 239 -24 val_24 NULL -24 val_24 NULL +239 val_239 NULL +239 val_239 NULL 24 val_24 24 24 val_24 24 -241 val_241 NULL +24 val_24 NULL +24 val_24 NULL 241 val_241 241 -242 val_242 NULL -242 val_242 NULL +241 val_241 NULL 242 val_242 242 242 val_242 242 -244 val_244 NULL +242 val_242 NULL +242 val_242 NULL 244 val_244 244 -247 val_247 NULL +244 val_244 NULL 247 val_247 247 -248 val_248 NULL +247 val_247 NULL 248 val_248 248 -249 val_249 NULL +248 val_248 NULL 249 val_249 249 -252 val_252 NULL +249 val_249 NULL 252 val_252 252 -255 val_255 NULL -255 val_255 NULL +252 val_252 NULL 255 val_255 255 255 val_255 255 -256 val_256 NULL -256 val_256 NULL +255 val_255 NULL +255 val_255 NULL 256 val_256 256 256 val_256 256 -257 val_257 NULL +256 val_256 NULL +256 val_256 NULL 257 val_257 257 -258 val_258 NULL +257 val_257 NULL 258 val_258 258 -26 val_26 NULL -26 val_26 NULL +258 val_258 NULL 26 val_26 26 26 val_26 26 -260 val_260 NULL +26 val_26 NULL +26 val_26 NULL 260 val_260 260 -262 val_262 NULL +260 val_260 NULL 262 val_262 262 -263 val_263 NULL +262 val_262 NULL 263 val_263 263 -265 val_265 NULL -265 val_265 NULL +263 val_263 NULL 265 val_265 265 265 val_265 265 -266 val_266 NULL +265 val_265 NULL +265 val_265 NULL 266 val_266 266 -27 val_27 NULL +266 val_266 NULL 27 val_27 27 -272 val_272 NULL -272 val_272 NULL +27 val_27 NULL 272 val_272 272 272 val_272 272 -273 val_273 NULL -273 val_273 NULL -273 val_273 NULL +272 val_272 NULL +272 val_272 NULL 273 val_273 273 273 val_273 273 273 val_273 273 -274 val_274 NULL 
+273 val_273 NULL +273 val_273 NULL +273 val_273 NULL 274 val_274 274 -275 val_275 NULL +274 val_274 NULL 275 val_275 275 -277 val_277 NULL -277 val_277 NULL -277 val_277 NULL -277 val_277 NULL +275 val_275 NULL 277 val_277 277 277 val_277 277 277 val_277 277 277 val_277 277 -278 val_278 NULL -278 val_278 NULL +277 val_277 NULL +277 val_277 NULL +277 val_277 NULL +277 val_277 NULL 278 val_278 278 278 val_278 278 -28 val_28 NULL +278 val_278 NULL +278 val_278 NULL 28 val_28 28 -280 val_280 NULL -280 val_280 NULL +28 val_28 NULL 280 val_280 280 280 val_280 280 -281 val_281 NULL -281 val_281 NULL +280 val_280 NULL +280 val_280 NULL 281 val_281 281 281 val_281 281 -282 val_282 NULL -282 val_282 NULL +281 val_281 NULL +281 val_281 NULL 282 val_282 282 282 val_282 282 -283 val_283 NULL +282 val_282 NULL +282 val_282 NULL 283 val_283 283 -284 val_284 NULL +283 val_283 NULL 284 val_284 284 -285 val_285 NULL +284 val_284 NULL 285 val_285 285 -286 val_286 NULL +285 val_285 NULL 286 val_286 286 -287 val_287 NULL +286 val_286 NULL 287 val_287 287 -288 val_288 NULL -288 val_288 NULL +287 val_287 NULL 288 val_288 288 288 val_288 288 -289 val_289 NULL +288 val_288 NULL +288 val_288 NULL 289 val_289 289 -291 val_291 NULL +289 val_289 NULL 291 val_291 291 -292 val_292 NULL +291 val_291 NULL 292 val_292 292 -296 val_296 NULL +292 val_292 NULL 296 val_296 296 -298 val_298 NULL -298 val_298 NULL -298 val_298 NULL +296 val_296 NULL 298 val_298 298 298 val_298 298 298 val_298 298 -30 val_30 NULL +298 val_298 NULL +298 val_298 NULL +298 val_298 NULL 30 val_30 30 -302 val_302 NULL +30 val_30 NULL 302 val_302 302 -305 val_305 NULL +302 val_302 NULL 305 val_305 305 -306 val_306 NULL +305 val_305 NULL 306 val_306 306 -307 val_307 NULL -307 val_307 NULL +306 val_306 NULL 307 val_307 307 307 val_307 307 -308 val_308 NULL +307 val_307 NULL +307 val_307 NULL 308 val_308 308 -309 val_309 NULL -309 val_309 NULL +308 val_308 NULL 309 val_309 309 309 val_309 309 -310 val_310 NULL +309 val_309 NULL +309 val_309 NULL 310 val_310 310 -311 val_311 NULL -311 val_311 NULL -311 val_311 NULL +310 val_310 NULL 311 val_311 311 311 val_311 311 311 val_311 311 -315 val_315 NULL +311 val_311 NULL +311 val_311 NULL +311 val_311 NULL 315 val_315 315 -316 val_316 NULL -316 val_316 NULL -316 val_316 NULL +315 val_315 NULL 316 val_316 316 316 val_316 316 316 val_316 316 -317 val_317 NULL -317 val_317 NULL +316 val_316 NULL +316 val_316 NULL +316 val_316 NULL 317 val_317 317 317 val_317 317 -318 val_318 NULL -318 val_318 NULL -318 val_318 NULL +317 val_317 NULL +317 val_317 NULL 318 val_318 318 318 val_318 318 318 val_318 318 -321 val_321 NULL -321 val_321 NULL +318 val_318 NULL +318 val_318 NULL +318 val_318 NULL 321 val_321 321 321 val_321 321 -322 val_322 NULL -322 val_322 NULL +321 val_321 NULL +321 val_321 NULL 322 val_322 322 322 val_322 322 -323 val_323 NULL +322 val_322 NULL +322 val_322 NULL 323 val_323 323 -325 val_325 NULL -325 val_325 NULL +323 val_323 NULL 325 val_325 325 325 val_325 325 -327 val_327 NULL -327 val_327 NULL -327 val_327 NULL +325 val_325 NULL +325 val_325 NULL 327 val_327 327 327 val_327 327 327 val_327 327 -33 val_33 NULL +327 val_327 NULL +327 val_327 NULL +327 val_327 NULL 33 val_33 33 -331 val_331 NULL -331 val_331 NULL +33 val_33 NULL 331 val_331 331 331 val_331 331 -332 val_332 NULL +331 val_331 NULL +331 val_331 NULL 332 val_332 332 -333 val_333 NULL -333 val_333 NULL +332 val_332 NULL 333 val_333 333 333 val_333 333 -335 val_335 NULL +333 val_333 NULL +333 val_333 NULL 335 val_335 335 -336 val_336 NULL 
+335 val_335 NULL 336 val_336 336 -338 val_338 NULL +336 val_336 NULL 338 val_338 338 -339 val_339 NULL +338 val_338 NULL 339 val_339 339 -34 val_34 NULL +339 val_339 NULL 34 val_34 34 -341 val_341 NULL +34 val_34 NULL 341 val_341 341 -342 val_342 NULL -342 val_342 NULL +341 val_341 NULL 342 val_342 342 342 val_342 342 -344 val_344 NULL -344 val_344 NULL +342 val_342 NULL +342 val_342 NULL 344 val_344 344 344 val_344 344 -345 val_345 NULL +344 val_344 NULL +344 val_344 NULL 345 val_345 345 -348 val_348 NULL -348 val_348 NULL -348 val_348 NULL -348 val_348 NULL -348 val_348 NULL +345 val_345 NULL 348 val_348 348 348 val_348 348 348 val_348 348 348 val_348 348 348 val_348 348 -35 val_35 NULL -35 val_35 NULL -35 val_35 NULL +348 val_348 NULL +348 val_348 NULL +348 val_348 NULL +348 val_348 NULL +348 val_348 NULL 35 val_35 35 35 val_35 35 35 val_35 35 -351 val_351 NULL +35 val_35 NULL +35 val_35 NULL +35 val_35 NULL 351 val_351 351 -353 val_353 NULL -353 val_353 NULL +351 val_351 NULL 353 val_353 353 353 val_353 353 -356 val_356 NULL +353 val_353 NULL +353 val_353 NULL 356 val_356 356 -360 val_360 NULL +356 val_356 NULL 360 val_360 360 -362 val_362 NULL +360 val_360 NULL 362 val_362 362 -364 val_364 NULL +362 val_362 NULL 364 val_364 364 -365 val_365 NULL +364 val_364 NULL 365 val_365 365 -366 val_366 NULL +365 val_365 NULL 366 val_366 366 -367 val_367 NULL -367 val_367 NULL +366 val_366 NULL 367 val_367 367 367 val_367 367 -368 val_368 NULL +367 val_367 NULL +367 val_367 NULL 368 val_368 368 -369 val_369 NULL -369 val_369 NULL -369 val_369 NULL +368 val_368 NULL 369 val_369 369 369 val_369 369 369 val_369 369 -37 val_37 NULL -37 val_37 NULL +369 val_369 NULL +369 val_369 NULL +369 val_369 NULL 37 val_37 37 37 val_37 37 -373 val_373 NULL +37 val_37 NULL +37 val_37 NULL 373 val_373 373 -374 val_374 NULL +373 val_373 NULL 374 val_374 374 -375 val_375 NULL +374 val_374 NULL 375 val_375 375 -377 val_377 NULL +375 val_375 NULL 377 val_377 377 -378 val_378 NULL +377 val_377 NULL 378 val_378 378 -379 val_379 NULL +378 val_378 NULL 379 val_379 379 -382 val_382 NULL -382 val_382 NULL +379 val_379 NULL 382 val_382 382 382 val_382 382 -384 val_384 NULL -384 val_384 NULL -384 val_384 NULL +382 val_382 NULL +382 val_382 NULL 384 val_384 384 384 val_384 384 384 val_384 384 -386 val_386 NULL +384 val_384 NULL +384 val_384 NULL +384 val_384 NULL 386 val_386 386 -389 val_389 NULL +386 val_386 NULL 389 val_389 389 -392 val_392 NULL +389 val_389 NULL 392 val_392 392 -393 val_393 NULL +392 val_392 NULL 393 val_393 393 -394 val_394 NULL +393 val_393 NULL 394 val_394 394 -395 val_395 NULL -395 val_395 NULL +394 val_394 NULL 395 val_395 395 395 val_395 395 -396 val_396 NULL -396 val_396 NULL -396 val_396 NULL +395 val_395 NULL +395 val_395 NULL 396 val_396 396 396 val_396 396 396 val_396 396 -397 val_397 NULL -397 val_397 NULL +396 val_396 NULL +396 val_396 NULL +396 val_396 NULL 397 val_397 397 397 val_397 397 -399 val_399 NULL -399 val_399 NULL +397 val_397 NULL +397 val_397 NULL 399 val_399 399 399 val_399 399 -4 val_4 NULL +399 val_399 NULL +399 val_399 NULL 4 val_4 4 -400 val_400 NULL +4 val_4 NULL 400 val_400 400 -401 val_401 NULL -401 val_401 NULL -401 val_401 NULL -401 val_401 NULL -401 val_401 NULL +400 val_400 NULL 401 val_401 401 401 val_401 401 401 val_401 401 401 val_401 401 401 val_401 401 -402 val_402 NULL +401 val_401 NULL +401 val_401 NULL +401 val_401 NULL +401 val_401 NULL +401 val_401 NULL 402 val_402 402 -403 val_403 NULL -403 val_403 NULL -403 val_403 NULL +402 val_402 NULL 403 val_403 403 403 
val_403 403 403 val_403 403 -404 val_404 NULL -404 val_404 NULL +403 val_403 NULL +403 val_403 NULL +403 val_403 NULL 404 val_404 404 404 val_404 404 -406 val_406 NULL -406 val_406 NULL -406 val_406 NULL -406 val_406 NULL +404 val_404 NULL +404 val_404 NULL 406 val_406 406 406 val_406 406 406 val_406 406 406 val_406 406 -407 val_407 NULL +406 val_406 NULL +406 val_406 NULL +406 val_406 NULL +406 val_406 NULL 407 val_407 407 -409 val_409 NULL -409 val_409 NULL -409 val_409 NULL +407 val_407 NULL 409 val_409 409 409 val_409 409 409 val_409 409 -41 val_41 NULL +409 val_409 NULL +409 val_409 NULL +409 val_409 NULL 41 val_41 41 -411 val_411 NULL +41 val_41 NULL 411 val_411 411 -413 val_413 NULL -413 val_413 NULL +411 val_411 NULL 413 val_413 413 413 val_413 413 -414 val_414 NULL -414 val_414 NULL +413 val_413 NULL +413 val_413 NULL 414 val_414 414 414 val_414 414 -417 val_417 NULL -417 val_417 NULL -417 val_417 NULL +414 val_414 NULL +414 val_414 NULL 417 val_417 417 417 val_417 417 417 val_417 417 -418 val_418 NULL +417 val_417 NULL +417 val_417 NULL +417 val_417 NULL 418 val_418 418 -419 val_419 NULL +418 val_418 NULL 419 val_419 419 -42 val_42 NULL -42 val_42 NULL +419 val_419 NULL 42 val_42 42 42 val_42 42 -421 val_421 NULL +42 val_42 NULL +42 val_42 NULL 421 val_421 421 -424 val_424 NULL -424 val_424 NULL +421 val_421 NULL 424 val_424 424 424 val_424 424 -427 val_427 NULL +424 val_424 NULL +424 val_424 NULL 427 val_427 427 -429 val_429 NULL -429 val_429 NULL +427 val_427 NULL 429 val_429 429 429 val_429 429 -43 val_43 NULL +429 val_429 NULL +429 val_429 NULL 43 val_43 43 -430 val_430 NULL -430 val_430 NULL -430 val_430 NULL +43 val_43 NULL 430 val_430 430 430 val_430 430 430 val_430 430 -431 val_431 NULL -431 val_431 NULL -431 val_431 NULL +430 val_430 NULL +430 val_430 NULL +430 val_430 NULL 431 val_431 431 431 val_431 431 431 val_431 431 -432 val_432 NULL +431 val_431 NULL +431 val_431 NULL +431 val_431 NULL 432 val_432 432 -435 val_435 NULL +432 val_432 NULL 435 val_435 435 -436 val_436 NULL +435 val_435 NULL 436 val_436 436 -437 val_437 NULL +436 val_436 NULL 437 val_437 437 -438 val_438 NULL -438 val_438 NULL -438 val_438 NULL +437 val_437 NULL 438 val_438 438 438 val_438 438 438 val_438 438 -439 val_439 NULL -439 val_439 NULL +438 val_438 NULL +438 val_438 NULL +438 val_438 NULL 439 val_439 439 439 val_439 439 -44 val_44 NULL +439 val_439 NULL +439 val_439 NULL 44 val_44 44 -443 val_443 NULL +44 val_44 NULL 443 val_443 443 -444 val_444 NULL +443 val_443 NULL 444 val_444 444 -446 val_446 NULL +444 val_444 NULL 446 val_446 446 -448 val_448 NULL +446 val_446 NULL 448 val_448 448 -449 val_449 NULL +448 val_448 NULL 449 val_449 449 -452 val_452 NULL +449 val_449 NULL 452 val_452 452 -453 val_453 NULL +452 val_452 NULL 453 val_453 453 -454 val_454 NULL -454 val_454 NULL -454 val_454 NULL +453 val_453 NULL 454 val_454 454 454 val_454 454 454 val_454 454 -455 val_455 NULL +454 val_454 NULL +454 val_454 NULL +454 val_454 NULL 455 val_455 455 -457 val_457 NULL +455 val_455 NULL 457 val_457 457 -458 val_458 NULL -458 val_458 NULL +457 val_457 NULL 458 val_458 458 458 val_458 458 -459 val_459 NULL -459 val_459 NULL +458 val_458 NULL +458 val_458 NULL 459 val_459 459 459 val_459 459 -460 val_460 NULL +459 val_459 NULL +459 val_459 NULL 460 val_460 460 -462 val_462 NULL -462 val_462 NULL +460 val_460 NULL 462 val_462 462 462 val_462 462 -463 val_463 NULL -463 val_463 NULL +462 val_462 NULL +462 val_462 NULL 463 val_463 463 463 val_463 463 -466 val_466 NULL -466 val_466 NULL -466 val_466 NULL +463 
val_463 NULL +463 val_463 NULL 466 val_466 466 466 val_466 466 466 val_466 466 -467 val_467 NULL +466 val_466 NULL +466 val_466 NULL +466 val_466 NULL 467 val_467 467 -468 val_468 NULL -468 val_468 NULL -468 val_468 NULL -468 val_468 NULL +467 val_467 NULL 468 val_468 468 468 val_468 468 468 val_468 468 468 val_468 468 -469 val_469 NULL -469 val_469 NULL -469 val_469 NULL -469 val_469 NULL -469 val_469 NULL +468 val_468 NULL +468 val_468 NULL +468 val_468 NULL +468 val_468 NULL 469 val_469 469 469 val_469 469 469 val_469 469 469 val_469 469 469 val_469 469 -47 val_47 NULL +469 val_469 NULL +469 val_469 NULL +469 val_469 NULL +469 val_469 NULL +469 val_469 NULL 47 val_47 47 -470 val_470 NULL +47 val_47 NULL 470 val_470 470 -472 val_472 NULL +470 val_470 NULL 472 val_472 472 -475 val_475 NULL +472 val_472 NULL 475 val_475 475 -477 val_477 NULL +475 val_475 NULL 477 val_477 477 -478 val_478 NULL -478 val_478 NULL +477 val_477 NULL 478 val_478 478 478 val_478 478 -479 val_479 NULL +478 val_478 NULL +478 val_478 NULL 479 val_479 479 -480 val_480 NULL -480 val_480 NULL -480 val_480 NULL +479 val_479 NULL 480 val_480 480 480 val_480 480 480 val_480 480 -481 val_481 NULL +480 val_480 NULL +480 val_480 NULL +480 val_480 NULL 481 val_481 481 -482 val_482 NULL +481 val_481 NULL 482 val_482 482 -483 val_483 NULL +482 val_482 NULL 483 val_483 483 -484 val_484 NULL +483 val_483 NULL 484 val_484 484 -485 val_485 NULL +484 val_484 NULL 485 val_485 485 -487 val_487 NULL +485 val_485 NULL 487 val_487 487 -489 val_489 NULL -489 val_489 NULL -489 val_489 NULL -489 val_489 NULL +487 val_487 NULL 489 val_489 489 489 val_489 489 489 val_489 489 489 val_489 489 -490 val_490 NULL +489 val_489 NULL +489 val_489 NULL +489 val_489 NULL +489 val_489 NULL 490 val_490 490 -491 val_491 NULL +490 val_490 NULL 491 val_491 491 -492 val_492 NULL -492 val_492 NULL +491 val_491 NULL 492 val_492 492 492 val_492 492 -493 val_493 NULL +492 val_492 NULL +492 val_492 NULL 493 val_493 493 -494 val_494 NULL +493 val_493 NULL 494 val_494 494 -495 val_495 NULL +494 val_494 NULL 495 val_495 495 -496 val_496 NULL +495 val_495 NULL 496 val_496 496 -497 val_497 NULL +496 val_496 NULL 497 val_497 497 -498 val_498 NULL -498 val_498 NULL -498 val_498 NULL +497 val_497 NULL 498 val_498 498 498 val_498 498 498 val_498 498 -5 val_5 NULL -5 val_5 NULL -5 val_5 NULL +498 val_498 NULL +498 val_498 NULL +498 val_498 NULL 5 val_5 5 5 val_5 5 5 val_5 5 -51 val_51 NULL -51 val_51 NULL +5 val_5 NULL +5 val_5 NULL +5 val_5 NULL 51 val_51 51 51 val_51 51 -53 val_53 NULL +51 val_51 NULL +51 val_51 NULL 53 val_53 53 -54 val_54 NULL +53 val_53 NULL 54 val_54 54 -57 val_57 NULL +54 val_54 NULL 57 val_57 57 -58 val_58 NULL -58 val_58 NULL +57 val_57 NULL 58 val_58 58 58 val_58 58 -64 val_64 NULL +58 val_58 NULL +58 val_58 NULL 64 val_64 64 -65 val_65 NULL +64 val_64 NULL 65 val_65 65 -66 val_66 NULL +65 val_65 NULL 66 val_66 66 -67 val_67 NULL -67 val_67 NULL +66 val_66 NULL 67 val_67 67 67 val_67 67 -69 val_69 NULL +67 val_67 NULL +67 val_67 NULL 69 val_69 69 -70 val_70 NULL -70 val_70 NULL -70 val_70 NULL +69 val_69 NULL 70 val_70 70 70 val_70 70 70 val_70 70 -72 val_72 NULL -72 val_72 NULL +70 val_70 NULL +70 val_70 NULL +70 val_70 NULL 72 val_72 72 72 val_72 72 -74 val_74 NULL +72 val_72 NULL +72 val_72 NULL 74 val_74 74 -76 val_76 NULL -76 val_76 NULL +74 val_74 NULL 76 val_76 76 76 val_76 76 -77 val_77 NULL +76 val_76 NULL +76 val_76 NULL 77 val_77 77 -78 val_78 NULL +77 val_77 NULL 78 val_78 78 -8 val_8 NULL +78 val_78 NULL 8 val_8 8 -80 val_80 NULL +8 
val_8 NULL 80 val_80 80 -82 val_82 NULL +80 val_80 NULL 82 val_82 82 -83 val_83 NULL -83 val_83 NULL +82 val_82 NULL 83 val_83 83 83 val_83 83 -84 val_84 NULL -84 val_84 NULL +83 val_83 NULL +83 val_83 NULL 84 val_84 84 84 val_84 84 -85 val_85 NULL +84 val_84 NULL +84 val_84 NULL 85 val_85 85 -86 val_86 NULL +85 val_85 NULL 86 val_86 86 -87 val_87 NULL +86 val_86 NULL 87 val_87 87 -9 val_9 NULL +87 val_87 NULL 9 val_9 9 -90 val_90 NULL -90 val_90 NULL -90 val_90 NULL +9 val_9 NULL 90 val_90 90 90 val_90 90 90 val_90 90 -92 val_92 NULL +90 val_90 NULL +90 val_90 NULL +90 val_90 NULL 92 val_92 92 -95 val_95 NULL -95 val_95 NULL +92 val_92 NULL 95 val_95 95 95 val_95 95 -96 val_96 NULL +95 val_95 NULL +95 val_95 NULL 96 val_96 96 -97 val_97 NULL -97 val_97 NULL +96 val_96 NULL 97 val_97 97 97 val_97 97 -98 val_98 NULL -98 val_98 NULL +97 val_97 NULL +97 val_97 NULL 98 val_98 98 98 val_98 98 +98 val_98 NULL +98 val_98 NULL diff --git ql/src/test/results/clientpositive/pcr.q.out ql/src/test/results/clientpositive/pcr.q.out index 68a58bd0cc..bdc18d3bf0 100644 --- ql/src/test/results/clientpositive/pcr.q.out +++ ql/src/test/results/clientpositive/pcr.q.out @@ -79,7 +79,7 @@ STAGE PLANS: Statistics: Num rows: 13 Data size: 104 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col2 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 13 Data size: 104 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -276,7 +276,7 @@ STAGE PLANS: Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -559,7 +559,7 @@ STAGE PLANS: Statistics: Num rows: 13 Data size: 104 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col2 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 13 Data size: 104 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -758,7 +758,7 @@ STAGE PLANS: Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col2 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -959,7 +959,7 @@ STAGE PLANS: Statistics: Num rows: 16 Data size: 128 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col2 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 16 Data size: 128 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -1219,7 +1219,7 @@ STAGE PLANS: Statistics: Num rows: 33 Data size: 264 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col2 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 33 Data size: 264 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -1483,7 +1483,7 @@ STAGE PLANS: Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -1661,7 +1661,7 @@ STAGE PLANS: 
Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -1883,7 +1883,7 @@ STAGE PLANS: Statistics: Num rows: 60 Data size: 480 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 60 Data size: 480 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -2176,7 +2176,7 @@ STAGE PLANS: Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) - null sort order: aaa + null sort order: zzz sort order: +++ Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -2491,7 +2491,7 @@ STAGE PLANS: GatherStats: false Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -2816,7 +2816,7 @@ STAGE PLANS: GatherStats: false Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -2976,7 +2976,7 @@ STAGE PLANS: Statistics: Num rows: 48 Data size: 384 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) - null sort order: aaa + null sort order: zzz sort order: +++ Statistics: Num rows: 48 Data size: 384 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -3306,7 +3306,7 @@ STAGE PLANS: Statistics: Num rows: 30 Data size: 240 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) - null sort order: aaa + null sort order: zzz sort order: +++ Statistics: Num rows: 30 Data size: 240 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -4793,7 +4793,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -4929,7 +4929,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col2 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -5121,7 +5121,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col2 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/pcs.q.out ql/src/test/results/clientpositive/pcs.q.out index 5cfc0932e8..5af5d96395 100644 --- ql/src/test/results/clientpositive/pcs.q.out +++ ql/src/test/results/clientpositive/pcs.q.out @@ -113,7 +113,7 
@@ STAGE PLANS: Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) - null sort order: aaa + null sort order: zzz sort order: +++ Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -1112,7 +1112,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string), '2008-04-08' (type: string) - null sort order: aaa + null sort order: zzz sort order: +++ Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: NONE tag: -1 @@ -1138,7 +1138,7 @@ STAGE PLANS: Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string), '2008-04-08' (type: string) - null sort order: aaa + null sort order: zzz sort order: +++ Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/perf/spark/query47.q.out ql/src/test/results/clientpositive/perf/spark/query47.q.out index 4a66d0bbd4..f829f4bb3c 100644 --- ql/src/test/results/clientpositive/perf/spark/query47.q.out +++ ql/src/test/results/clientpositive/perf/spark/query47.q.out @@ -499,7 +499,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST, _col2 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST, _col2 ASC NULLS LAST partition by: _col4, _col3, _col5, _col6 raw input shape: window functions: @@ -628,7 +628,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col0 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col0 ASC NULLS LAST, _col1 ASC NULLS LAST partition by: _col3, _col2, _col4, _col5 raw input shape: window functions: @@ -718,7 +718,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col0 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col0 ASC NULLS LAST, _col1 ASC NULLS LAST partition by: _col3, _col2, _col4, _col5 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/perf/spark/query49.q.out ql/src/test/results/clientpositive/perf/spark/query49.q.out index 16cc603f9a..87b84bb452 100644 --- ql/src/test/results/clientpositive/perf/spark/query49.q.out +++ ql/src/test/results/clientpositive/perf/spark/query49.q.out @@ -490,7 +490,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: (CAST( _col1 AS decimal(15,4)) / CAST( _col2 AS decimal(15,4))) ASC NULLS FIRST + order by: (CAST( _col1 AS decimal(15,4)) / CAST( _col2 AS decimal(15,4))) ASC NULLS LAST partition by: 0 raw input shape: window functions: @@ -527,7 +527,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: (CAST( _col4 AS decimal(15,4)) / CAST( _col5 AS decimal(15,4))) ASC NULLS FIRST + order by: (CAST( _col4 AS decimal(15,4)) / CAST( _col5 AS decimal(15,4))) ASC NULLS LAST partition by: 0 raw input shape: window functions: @@ -645,7 +645,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: (CAST( _col1 AS decimal(15,4)) / CAST( _col2 AS decimal(15,4))) ASC NULLS FIRST + order by: (CAST( _col1 AS decimal(15,4)) / CAST( _col2 AS decimal(15,4))) ASC NULLS LAST partition by: 0 
raw input shape: window functions: @@ -682,7 +682,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: (CAST( _col4 AS decimal(15,4)) / CAST( _col5 AS decimal(15,4))) ASC NULLS FIRST + order by: (CAST( _col4 AS decimal(15,4)) / CAST( _col5 AS decimal(15,4))) ASC NULLS LAST partition by: 0 raw input shape: window functions: @@ -769,7 +769,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: (CAST( _col1 AS decimal(15,4)) / CAST( _col2 AS decimal(15,4))) ASC NULLS FIRST + order by: (CAST( _col1 AS decimal(15,4)) / CAST( _col2 AS decimal(15,4))) ASC NULLS LAST partition by: 0 raw input shape: window functions: @@ -806,7 +806,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: (CAST( _col4 AS decimal(15,4)) / CAST( _col5 AS decimal(15,4))) ASC NULLS FIRST + order by: (CAST( _col4 AS decimal(15,4)) / CAST( _col5 AS decimal(15,4))) ASC NULLS LAST partition by: 0 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/perf/spark/query51.q.out ql/src/test/results/clientpositive/perf/spark/query51.q.out index 5651f8c324..01c707ad08 100644 --- ql/src/test/results/clientpositive/perf/spark/query51.q.out +++ ql/src/test/results/clientpositive/perf/spark/query51.q.out @@ -200,7 +200,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -260,7 +260,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -285,7 +285,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: int), _col1 (type: string) 1 _col0 (type: int), _col1 (type: string) @@ -313,7 +313,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: CASE WHEN (_col4 is not null) THEN (_col4) ELSE (_col1) END ASC NULLS FIRST + order by: CASE WHEN (_col4 is not null) THEN (_col4) ELSE (_col1) END ASC NULLS LAST partition by: CASE WHEN (_col3 is not null) THEN (_col3) ELSE (_col0) END raw input shape: window functions: diff --git ql/src/test/results/clientpositive/perf/spark/query57.q.out ql/src/test/results/clientpositive/perf/spark/query57.q.out index 502d5f7d8d..e084a8714d 100644 --- ql/src/test/results/clientpositive/perf/spark/query57.q.out +++ ql/src/test/results/clientpositive/perf/spark/query57.q.out @@ -493,7 +493,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST, _col2 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST, _col2 ASC NULLS LAST partition by: _col5, _col4, _col3 raw input shape: window functions: @@ -634,7 +634,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col0 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col0 ASC NULLS LAST, _col1 ASC NULLS LAST partition by: _col4, _col3, _col2 raw input shape: window functions: @@ -712,7 +712,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col0 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col0 ASC NULLS LAST, _col1 ASC NULLS LAST partition by: _col4, _col3, _col2 
raw input shape: window functions: diff --git ql/src/test/results/clientpositive/perf/spark/query97.q.out ql/src/test/results/clientpositive/perf/spark/query97.q.out index 7e7d791377..a5da85bbc8 100644 --- ql/src/test/results/clientpositive/perf/spark/query97.q.out +++ ql/src/test/results/clientpositive/perf/spark/query97.q.out @@ -177,7 +177,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: int), _col1 (type: int) 1 _col0 (type: int), _col1 (type: int) diff --git ql/src/test/results/clientpositive/perf/tez/query47.q.out ql/src/test/results/clientpositive/perf/tez/query47.q.out index f931483d57..53b2b67c6a 100644 --- ql/src/test/results/clientpositive/perf/tez/query47.q.out +++ ql/src/test/results/clientpositive/perf/tez/query47.q.out @@ -144,7 +144,7 @@ Stage-0 Filter Operator [FIL_327] (rows=63887519 width=88) predicate:((_col0 > 0) and (_col1 = 2000) and rank_window_1 is not null) PTF Operator [PTF_326] (rows=383325119 width=88) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST, _col2 ASC NULLS FIRST","partition by:":"_col4, _col3, _col5, _col6"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST, _col2 ASC NULLS LAST","partition by:":"_col4, _col3, _col5, _col6"}] Select Operator [SEL_325] (rows=383325119 width=88) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7"] <-Reducer 10 [SIMPLE_EDGE] vectorized @@ -255,7 +255,7 @@ Stage-0 Filter Operator [FIL_313] (rows=383325119 width=88) predicate:rank_window_0 is not null PTF Operator [PTF_312] (rows=383325119 width=88) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col0 ASC NULLS FIRST, _col1 ASC NULLS FIRST","partition by:":"_col3, _col2, _col4, _col5"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col0 ASC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col3, _col2, _col4, _col5"}] Select Operator [SEL_311] (rows=383325119 width=88) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"] <-Reducer 5 [SIMPLE_EDGE] vectorized @@ -270,7 +270,7 @@ Stage-0 Filter Operator [FIL_318] (rows=383325119 width=88) predicate:rank_window_0 is not null PTF Operator [PTF_317] (rows=383325119 width=88) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col0 ASC NULLS FIRST, _col1 ASC NULLS FIRST","partition by:":"_col3, _col2, _col4, _col5"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col0 ASC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col3, _col2, _col4, _col5"}] Select Operator [SEL_316] (rows=383325119 width=88) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"] <-Reducer 5 [SIMPLE_EDGE] vectorized diff --git ql/src/test/results/clientpositive/perf/tez/query49.q.out ql/src/test/results/clientpositive/perf/tez/query49.q.out index 6f642ef1b2..5c1889c883 100644 --- ql/src/test/results/clientpositive/perf/tez/query49.q.out +++ ql/src/test/results/clientpositive/perf/tez/query49.q.out @@ -307,7 +307,7 @@ Stage-0 Filter Operator [FIL_347] (rows=8604378 width=88) predicate:((_col0 <= 10) or (rank_window_1 <= 10)) PTF Operator [PTF_346] (rows=12906568 width=88) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"(CAST( _col4 AS decimal(15,4)) / CAST( _col5 AS decimal(15,4))) ASC NULLS FIRST","partition by:":"0"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"(CAST( 
_col4 AS decimal(15,4)) / CAST( _col5 AS decimal(15,4))) ASC NULLS LAST","partition by:":"0"}] Select Operator [SEL_345] (rows=12906568 width=88) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] <-Reducer 23 [SIMPLE_EDGE] vectorized @@ -316,7 +316,7 @@ Stage-0 Select Operator [SEL_343] (rows=12906568 width=88) Output:["rank_window_0","_col0","_col1","_col2","_col3","_col4"] PTF Operator [PTF_342] (rows=12906568 width=88) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"(CAST( _col1 AS decimal(15,4)) / CAST( _col2 AS decimal(15,4))) ASC NULLS FIRST","partition by:":"0"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"(CAST( _col1 AS decimal(15,4)) / CAST( _col2 AS decimal(15,4))) ASC NULLS LAST","partition by:":"0"}] Select Operator [SEL_341] (rows=12906568 width=88) Output:["_col0","_col1","_col2","_col3","_col4"] <-Reducer 22 [SIMPLE_EDGE] vectorized @@ -398,7 +398,7 @@ Stage-0 Filter Operator [FIL_327] (rows=4302070 width=135) predicate:((_col0 <= 10) or (rank_window_1 <= 10)) PTF Operator [PTF_326] (rows=6453105 width=135) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"(CAST( _col4 AS decimal(15,4)) / CAST( _col5 AS decimal(15,4))) ASC NULLS FIRST","partition by:":"0"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"(CAST( _col4 AS decimal(15,4)) / CAST( _col5 AS decimal(15,4))) ASC NULLS LAST","partition by:":"0"}] Select Operator [SEL_325] (rows=6453105 width=135) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] <-Reducer 17 [SIMPLE_EDGE] vectorized @@ -407,7 +407,7 @@ Stage-0 Select Operator [SEL_323] (rows=6453105 width=135) Output:["rank_window_0","_col0","_col1","_col2","_col3","_col4"] PTF Operator [PTF_322] (rows=6453105 width=135) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"(CAST( _col1 AS decimal(15,4)) / CAST( _col2 AS decimal(15,4))) ASC NULLS FIRST","partition by:":"0"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"(CAST( _col1 AS decimal(15,4)) / CAST( _col2 AS decimal(15,4))) ASC NULLS LAST","partition by:":"0"}] Select Operator [SEL_321] (rows=6453105 width=135) Output:["_col0","_col1","_col2","_col3","_col4"] <-Reducer 16 [SIMPLE_EDGE] vectorized @@ -472,7 +472,7 @@ Stage-0 Filter Operator [FIL_296] (rows=2151150 width=135) predicate:((_col0 <= 10) or (rank_window_1 <= 10)) PTF Operator [PTF_295] (rows=3226726 width=135) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"(CAST( _col4 AS decimal(15,4)) / CAST( _col5 AS decimal(15,4))) ASC NULLS FIRST","partition by:":"0"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"(CAST( _col4 AS decimal(15,4)) / CAST( _col5 AS decimal(15,4))) ASC NULLS LAST","partition by:":"0"}] Select Operator [SEL_294] (rows=3226726 width=135) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] <-Reducer 5 [SIMPLE_EDGE] vectorized @@ -481,7 +481,7 @@ Stage-0 Select Operator [SEL_292] (rows=3226726 width=135) Output:["rank_window_0","_col0","_col1","_col2","_col3","_col4"] PTF Operator [PTF_291] (rows=3226726 width=135) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"(CAST( _col1 AS decimal(15,4)) / CAST( _col2 AS decimal(15,4))) ASC NULLS FIRST","partition by:":"0"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"(CAST( _col1 AS decimal(15,4)) / CAST( _col2 AS decimal(15,4))) ASC NULLS LAST","partition by:":"0"}] Select Operator [SEL_290] (rows=3226726 
width=135) Output:["_col0","_col1","_col2","_col3","_col4"] <-Reducer 4 [SIMPLE_EDGE] vectorized diff --git ql/src/test/results/clientpositive/perf/tez/query51.q.out ql/src/test/results/clientpositive/perf/tez/query51.q.out index ec9f50cb28..6f728d69ae 100644 --- ql/src/test/results/clientpositive/perf/tez/query51.q.out +++ ql/src/test/results/clientpositive/perf/tez/query51.q.out @@ -118,7 +118,7 @@ Stage-0 Filter Operator [FIL_112] (rows=116159124 width=88) predicate:(max_window_0 > max_window_1) PTF Operator [PTF_111] (rows=348477374 width=88) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"CASE WHEN (_col4 is not null) THEN (_col4) ELSE (_col1) END ASC NULLS FIRST","partition by:":"CASE WHEN (_col3 is not null) THEN (_col3) ELSE (_col0) END"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"CASE WHEN (_col4 is not null) THEN (_col4) ELSE (_col1) END ASC NULLS LAST","partition by:":"CASE WHEN (_col3 is not null) THEN (_col3) ELSE (_col0) END"}] Select Operator [SEL_110] (rows=348477374 width=88) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] <-Reducer 4 [SIMPLE_EDGE] @@ -132,7 +132,7 @@ Stage-0 Select Operator [SEL_37] (rows=79201469 width=135) Output:["_col0","_col1","_col2"] PTF Operator [PTF_36] (rows=79201469 width=135) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col0"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col0"}] Group By Operator [GBY_32] (rows=79201469 width=135) Output:["_col0","_col1","_col2"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0, KEY._col1 <-Reducer 9 [SIMPLE_EDGE] @@ -177,7 +177,7 @@ Stage-0 Select Operator [SEL_17] (rows=316797606 width=88) Output:["_col0","_col1","_col2"] PTF Operator [PTF_16] (rows=316797606 width=88) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col0"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col0"}] Group By Operator [GBY_12] (rows=316797606 width=88) Output:["_col0","_col1","_col2"],aggregations:["sum(VALUE._col0)"],keys:KEY._col0, KEY._col1 <-Reducer 2 [SIMPLE_EDGE] diff --git ql/src/test/results/clientpositive/perf/tez/query57.q.out ql/src/test/results/clientpositive/perf/tez/query57.q.out index fed340af9b..769321fa53 100644 --- ql/src/test/results/clientpositive/perf/tez/query57.q.out +++ ql/src/test/results/clientpositive/perf/tez/query57.q.out @@ -138,7 +138,7 @@ Stage-0 Filter Operator [FIL_327] (rows=31942874 width=135) predicate:((_col0 > 0) and (_col1 = 2000) and rank_window_1 is not null) PTF Operator [PTF_326] (rows=191657247 width=135) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST, _col2 ASC NULLS FIRST","partition by:":"_col5, _col4, _col3"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST, _col2 ASC NULLS LAST","partition by:":"_col5, _col4, _col3"}] Select Operator [SEL_325] (rows=191657247 width=135) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"] <-Reducer 10 [SIMPLE_EDGE] vectorized @@ -249,7 +249,7 @@ Stage-0 Filter Operator [FIL_313] (rows=191657247 width=135) predicate:rank_window_0 is not null PTF Operator [PTF_312] (rows=191657247 width=135) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col0 ASC NULLS FIRST, _col1 ASC NULLS 
FIRST","partition by:":"_col4, _col3, _col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col0 ASC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col4, _col3, _col2"}] Select Operator [SEL_311] (rows=191657247 width=135) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] <-Reducer 5 [SIMPLE_EDGE] vectorized @@ -264,7 +264,7 @@ Stage-0 Filter Operator [FIL_318] (rows=191657247 width=135) predicate:rank_window_0 is not null PTF Operator [PTF_317] (rows=191657247 width=135) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col0 ASC NULLS FIRST, _col1 ASC NULLS FIRST","partition by:":"_col4, _col3, _col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col0 ASC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col4, _col3, _col2"}] Select Operator [SEL_316] (rows=191657247 width=135) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] <-Reducer 5 [SIMPLE_EDGE] vectorized diff --git ql/src/test/results/clientpositive/pointlookup2.q.out ql/src/test/results/clientpositive/pointlookup2.q.out index b627a56daf..3457c1b562 100644 --- ql/src/test/results/clientpositive/pointlookup2.q.out +++ ql/src/test/results/clientpositive/pointlookup2.q.out @@ -125,7 +125,7 @@ STAGE PLANS: Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) - null sort order: aaa + null sort order: zzz sort order: +++ Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -435,7 +435,7 @@ STAGE PLANS: GatherStats: false Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -720,7 +720,7 @@ STAGE PLANS: GatherStats: false Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -1046,7 +1046,7 @@ STAGE PLANS: GatherStats: false Reduce Output Operator key expressions: _col4 (type: int), _col5 (type: string), _col2 (type: string) - null sort order: aaa + null sort order: zzz sort order: +++ Statistics: Num rows: 20 Data size: 540 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -1425,7 +1425,7 @@ STAGE PLANS: GatherStats: false Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string), _col3 (type: string) - null sort order: aaa + null sort order: zzz sort order: +++ Statistics: Num rows: 30 Data size: 810 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -1707,7 +1707,7 @@ STAGE PLANS: Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) - null sort order: aaa + null sort order: zzz sort order: +++ Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -2017,7 +2017,7 @@ STAGE PLANS: GatherStats: false Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -2302,7 +2302,7 @@ STAGE PLANS: GatherStats: false Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: 
Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -2620,7 +2620,7 @@ STAGE PLANS: GatherStats: false Reduce Output Operator key expressions: _col4 (type: int), _col5 (type: string), _col2 (type: string) - null sort order: aaa + null sort order: zzz sort order: +++ Statistics: Num rows: 20 Data size: 540 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -2991,7 +2991,7 @@ STAGE PLANS: GatherStats: false Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string), _col3 (type: string) - null sort order: aaa + null sort order: zzz sort order: +++ Statistics: Num rows: 30 Data size: 810 Basic stats: COMPLETE Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/pointlookup3.q.out ql/src/test/results/clientpositive/pointlookup3.q.out index 855e2da0bf..d5cf4b2003 100644 --- ql/src/test/results/clientpositive/pointlookup3.q.out +++ ql/src/test/results/clientpositive/pointlookup3.q.out @@ -79,7 +79,7 @@ STAGE PLANS: Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string) - null sort order: aaaa + null sort order: zzzz sort order: ++++ Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -264,7 +264,7 @@ STAGE PLANS: Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) - null sort order: aaa + null sort order: zzz sort order: +++ Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -527,7 +527,7 @@ STAGE PLANS: GatherStats: false Reduce Output Operator key expressions: _col3 (type: int), _col4 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -814,7 +814,7 @@ STAGE PLANS: GatherStats: false Reduce Output Operator key expressions: _col3 (type: int), _col4 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -1143,7 +1143,7 @@ STAGE PLANS: GatherStats: false Reduce Output Operator key expressions: _col4 (type: int), _col5 (type: string), _col2 (type: string) - null sort order: aaa + null sort order: zzz sort order: +++ Statistics: Num rows: 1200 Data size: 20400 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -1473,7 +1473,7 @@ STAGE PLANS: Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string) - null sort order: aaaa + null sort order: zzzz sort order: ++++ Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -1653,7 +1653,7 @@ STAGE PLANS: Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) - null sort order: aaa + null sort order: zzz sort order: +++ Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -1916,7 +1916,7 @@ STAGE PLANS: GatherStats: false Reduce Output Operator key expressions: _col3 (type: int), _col4 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: 
Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -2203,7 +2203,7 @@ STAGE PLANS: GatherStats: false Reduce Output Operator key expressions: _col3 (type: int), _col4 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -2524,7 +2524,7 @@ STAGE PLANS: GatherStats: false Reduce Output Operator key expressions: _col4 (type: int), _col5 (type: string), _col2 (type: string) - null sort order: aaa + null sort order: zzz sort order: +++ Statistics: Num rows: 1200 Data size: 20400 Basic stats: COMPLETE Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/pointlookup4.q.out ql/src/test/results/clientpositive/pointlookup4.q.out index 63a0e37f2f..2e12b8ae41 100644 --- ql/src/test/results/clientpositive/pointlookup4.q.out +++ ql/src/test/results/clientpositive/pointlookup4.q.out @@ -79,7 +79,7 @@ STAGE PLANS: Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string) - null sort order: aaaa + null sort order: zzzz sort order: ++++ Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -278,7 +278,7 @@ STAGE PLANS: Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string) - null sort order: aaaa + null sort order: zzzz sort order: ++++ Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/ppd_vc.q.out ql/src/test/results/clientpositive/ppd_vc.q.out index e8ea1baf60..eae73895a1 100644 --- ql/src/test/results/clientpositive/ppd_vc.q.out +++ ql/src/test/results/clientpositive/ppd_vc.q.out @@ -676,7 +676,7 @@ STAGE PLANS: GatherStats: false Reduce Output Operator key expressions: _col2 (type: string), _col3 (type: string), _col4 (type: bigint) - null sort order: aaa + null sort order: zzz sort order: +++ Statistics: Num rows: 732 Data size: 7782 Basic stats: COMPLETE Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/ptf_matchpath.q.out ql/src/test/results/clientpositive/ptf_matchpath.q.out index 8d6f071f7e..51fe10cca9 100644 --- ql/src/test/results/clientpositive/ptf_matchpath.q.out +++ ql/src/test/results/clientpositive/ptf_matchpath.q.out @@ -88,7 +88,7 @@ STAGE PLANS: input alias: ptf_1 arguments: 'LATE.LATE+', 'LATE', (_col5 > 15.0), 'origin_city_name, fl_num, year, month, day_of_month, size(tpath) as sz, tpath[0].day_of_month as tpath' name: matchpath - order by: _col2 ASC NULLS FIRST, _col3 ASC NULLS FIRST, _col4 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col3 ASC NULLS LAST, _col4 ASC NULLS LAST output shape: tpath: int partition by: _col6 raw input shape: @@ -206,7 +206,7 @@ STAGE PLANS: input alias: ptf_1 arguments: 'LATE.LATE+', 'LATE', (_col5 > 15.0), 'origin_city_name, fl_num, year, month, day_of_month, size(tpath) as sz, tpath[0].day_of_month as tpath' name: matchpath - order by: _col6 ASC NULLS FIRST, _col2 ASC NULLS FIRST, _col3 ASC NULLS FIRST, _col4 ASC NULLS FIRST + order by: _col6 ASC NULLS LAST, _col2 ASC NULLS LAST, _col3 ASC NULLS LAST, _col4 ASC NULLS LAST output shape: tpath: int partition by: 0 raw input shape: @@ -322,7 +322,7 @@ STAGE PLANS: input alias: ptf_1 arguments: 'LATE.LATE+', 
'LATE', (_col5 > 15.0), 'origin_city_name, fl_num, year, month, day_of_month, size(tpath) as sz, tpath[0].day_of_month as tpath' name: matchpath - order by: _col6 ASC NULLS FIRST, _col2 ASC NULLS FIRST, _col3 ASC NULLS FIRST, _col4 ASC NULLS FIRST + order by: _col6 ASC NULLS LAST, _col2 ASC NULLS LAST, _col3 ASC NULLS LAST, _col4 ASC NULLS LAST output shape: tpath: int partition by: 0 raw input shape: diff --git ql/src/test/results/clientpositive/push_or.q.out ql/src/test/results/clientpositive/push_or.q.out index c59de9eb3a..837a7ceca9 100644 --- ql/src/test/results/clientpositive/push_or.q.out +++ ql/src/test/results/clientpositive/push_or.q.out @@ -61,7 +61,7 @@ STAGE PLANS: Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col2 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/quotedid_basic.q.out ql/src/test/results/clientpositive/quotedid_basic.q.out index b3b9b4258c..a749b91474 100644 --- ql/src/test/results/clientpositive/quotedid_basic.q.out +++ ql/src/test/results/clientpositive/quotedid_basic.q.out @@ -215,7 +215,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: '1' raw input shape: window functions: @@ -320,7 +320,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: '1' raw input shape: window functions: diff --git ql/src/test/results/clientpositive/sample6.q.out ql/src/test/results/clientpositive/sample6.q.out index a6a6f2cb90..e93e096005 100644 --- ql/src/test/results/clientpositive/sample6.q.out +++ ql/src/test/results/clientpositive/sample6.q.out @@ -729,7 +729,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 5301 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 500 Data size: 5301 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -1136,7 +1136,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 5301 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 500 Data size: 5301 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -1766,7 +1766,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 5301 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 500 Data size: 5301 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -2278,7 +2278,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 5301 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 500 Data size: 5301 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -2730,7 +2730,7 @@ STAGE PLANS: Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 
(type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -3108,7 +3108,7 @@ STAGE PLANS: Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -3377,7 +3377,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/semijoin2.q.out ql/src/test/results/clientpositive/semijoin2.q.out index f4454f0f92..37544922ec 100644 --- ql/src/test/results/clientpositive/semijoin2.q.out +++ ql/src/test/results/clientpositive/semijoin2.q.out @@ -157,7 +157,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: (_col98 + _col16) ASC NULLS FIRST, floor(_col21) DESC NULLS LAST + order by: (_col98 + _col16) ASC NULLS LAST, floor(_col21) DESC NULLS LAST partition by: (_col98 + _col16) raw input shape: window functions: diff --git ql/src/test/results/clientpositive/semijoin4.q.out ql/src/test/results/clientpositive/semijoin4.q.out index 2b758834a0..83db8b85bc 100644 --- ql/src/test/results/clientpositive/semijoin4.q.out +++ ql/src/test/results/clientpositive/semijoin4.q.out @@ -169,7 +169,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: (UDFToShort(UDFToByte(-92)) + _col1) ASC NULLS FIRST, floor(_col3) DESC NULLS LAST + order by: (UDFToShort(UDFToByte(-92)) + _col1) ASC NULLS LAST, floor(_col3) DESC NULLS LAST partition by: (UDFToShort(UDFToByte(-92)) + _col1) raw input shape: window functions: diff --git ql/src/test/results/clientpositive/semijoin5.q.out ql/src/test/results/clientpositive/semijoin5.q.out index b4c7af846a..fc65f85b47 100644 --- ql/src/test/results/clientpositive/semijoin5.q.out +++ ql/src/test/results/clientpositive/semijoin5.q.out @@ -165,7 +165,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: (_col7 + UDFToInteger(_col5)) ASC NULLS FIRST, floor(_col3) DESC NULLS LAST + order by: (_col7 + UDFToInteger(_col5)) ASC NULLS LAST, floor(_col3) DESC NULLS LAST partition by: (_col7 + UDFToInteger(_col5)) raw input shape: window functions: diff --git ql/src/test/results/clientpositive/serde_regex.q.out ql/src/test/results/clientpositive/serde_regex.q.out index 5a19ec93a4..bf10555de2 100644 --- ql/src/test/results/clientpositive/serde_regex.q.out +++ ql/src/test/results/clientpositive/serde_regex.q.out @@ -200,7 +200,6 @@ POSTHOOK: query: SELECT key, value FROM serde_regex1 ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@serde_regex1 #### A masked pattern was here #### -NULL 0 -1234567890.123456789000000000 -1234567890 -4400.000000000000000000 4400 -1255.490000000000000000 -1255 @@ -238,6 +237,7 @@ NULL 0 125.200000000000000000 125 200.000000000000000000 200 1234567890.123456780000000000 1234567890 +NULL 0 PREHOOK: query: DROP TABLE serde_regex1 PREHOOK: type: DROPTABLE PREHOOK: Input: default@serde_regex1 diff --git 
ql/src/test/results/clientpositive/skewjoin_mapjoin1.q.out ql/src/test/results/clientpositive/skewjoin_mapjoin1.q.out index df40ba850a..172566886b 100644 --- ql/src/test/results/clientpositive/skewjoin_mapjoin1.q.out +++ ql/src/test/results/clientpositive/skewjoin_mapjoin1.q.out @@ -304,14 +304,14 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n67 POSTHOOK: Input: default@t2_n40 #### A masked pattern was here #### -NULL NULL 4 14 -NULL NULL 5 15 2 12 2 22 3 13 3 13 8 18 8 18 8 18 8 18 8 28 8 18 8 28 8 18 +NULL NULL 4 14 +NULL NULL 5 15 PREHOOK: query: EXPLAIN SELECT count(1) FROM T1_n67 a JOIN T2_n40 b ON a.key = b.key PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/skewjoin_mapjoin10.q.out ql/src/test/results/clientpositive/skewjoin_mapjoin10.q.out index b53e83b0e6..4ba0f912ae 100644 --- ql/src/test/results/clientpositive/skewjoin_mapjoin10.q.out +++ ql/src/test/results/clientpositive/skewjoin_mapjoin10.q.out @@ -336,14 +336,14 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n151 POSTHOOK: Input: default@t2_n88 #### A masked pattern was here #### -NULL NULL 4 14 -NULL NULL 5 15 2 12 2 22 3 13 3 13 8 18 8 18 8 18 8 18 8 28 8 18 8 28 8 18 +NULL NULL 4 14 +NULL NULL 5 15 PREHOOK: query: EXPLAIN SELECT count(1) FROM T1_n151 a JOIN T2_n88 b ON a.key = b.key PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/skewjoin_mapjoin2.q.out ql/src/test/results/clientpositive/skewjoin_mapjoin2.q.out index fa0f615144..35c22226d9 100644 --- ql/src/test/results/clientpositive/skewjoin_mapjoin2.q.out +++ ql/src/test/results/clientpositive/skewjoin_mapjoin2.q.out @@ -224,7 +224,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) @@ -299,7 +299,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) @@ -330,8 +330,6 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n25 POSTHOOK: Input: default@t2_n16 #### A masked pattern was here #### -NULL NULL 4 14 -NULL NULL 5 15 1 11 NULL NULL 2 12 2 22 3 13 3 13 @@ -340,3 +338,5 @@ NULL NULL 5 15 8 18 8 18 8 28 8 18 8 28 8 18 +NULL NULL 4 14 +NULL NULL 5 15 diff --git ql/src/test/results/clientpositive/skewjoin_union_remove_1.q.out ql/src/test/results/clientpositive/skewjoin_union_remove_1.q.out index 68a59728cb..a7351df5d7 100644 --- ql/src/test/results/clientpositive/skewjoin_union_remove_1.q.out +++ ql/src/test/results/clientpositive/skewjoin_union_remove_1.q.out @@ -310,14 +310,14 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n57 POSTHOOK: Input: default@t2_n35 #### A masked pattern was here #### -NULL NULL 4 14 -NULL NULL 5 15 2 12 2 22 3 13 3 13 8 18 8 18 8 18 8 18 8 28 8 18 8 28 8 18 +NULL NULL 4 14 +NULL NULL 5 15 PREHOOK: query: create table DEST1_n58(key1 STRING, val1 STRING, key2 STRING, val2 STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default @@ -646,11 +646,11 @@ ORDER BY key1, key2, val1, val2 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1_n58 #### A masked pattern was here #### -NULL NULL 4 14 -NULL NULL 5 15 2 12 2 22 3 13 3 13 8 18 8 18 8 18 8 18 8 28 8 18 8 28 8 18 +NULL NULL 4 14 +NULL NULL 5 15 diff --git ql/src/test/results/clientpositive/skewjoinopt1.q.out ql/src/test/results/clientpositive/skewjoinopt1.q.out index c948c785ae..93ef074e71 100644 --- ql/src/test/results/clientpositive/skewjoinopt1.q.out +++ 
ql/src/test/results/clientpositive/skewjoinopt1.q.out @@ -356,14 +356,14 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n101 POSTHOOK: Input: default@t2_n64 #### A masked pattern was here #### -NULL NULL 4 14 -NULL NULL 5 15 2 12 2 22 3 13 3 13 8 18 8 18 8 18 8 18 8 28 8 18 8 28 8 18 +NULL NULL 4 14 +NULL NULL 5 15 PREHOOK: query: EXPLAIN SELECT count(1) FROM T1_n101 a JOIN T2_n64 b ON a.key = b.key PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/skewjoinopt3.q.out ql/src/test/results/clientpositive/skewjoinopt3.q.out index 6de674cf38..2f23c1749c 100644 --- ql/src/test/results/clientpositive/skewjoinopt3.q.out +++ ql/src/test/results/clientpositive/skewjoinopt3.q.out @@ -250,7 +250,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) @@ -325,7 +325,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) @@ -356,8 +356,6 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n12 POSTHOOK: Input: default@t2_n7 #### A masked pattern was here #### -NULL NULL 4 14 -NULL NULL 5 15 1 11 NULL NULL 2 12 2 22 3 13 3 13 @@ -366,3 +364,5 @@ NULL NULL 5 15 8 18 8 18 8 28 8 18 8 28 8 18 +NULL NULL 4 14 +NULL NULL 5 15 diff --git ql/src/test/results/clientpositive/smb_mapjoin_1.q.out ql/src/test/results/clientpositive/smb_mapjoin_1.q.out index 1f334bd0c2..75210a39bb 100644 --- ql/src/test/results/clientpositive/smb_mapjoin_1.q.out +++ ql/src/test/results/clientpositive/smb_mapjoin_1.q.out @@ -272,7 +272,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 2060 Basic stats: COMPLETE Column stats: NONE Sorted Merge Bucket Map Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) @@ -494,7 +494,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 2080 Basic stats: COMPLETE Column stats: NONE Sorted Merge Bucket Map Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) diff --git ql/src/test/results/clientpositive/smb_mapjoin_13.q.out ql/src/test/results/clientpositive/smb_mapjoin_13.q.out index 123e84457a..81e34ee440 100644 --- ql/src/test/results/clientpositive/smb_mapjoin_13.q.out +++ ql/src/test/results/clientpositive/smb_mapjoin_13.q.out @@ -97,7 +97,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + tag: -1 TopN: 10 @@ -278,7 +278,7 @@ STAGE PLANS: Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/smb_mapjoin_2.q.out ql/src/test/results/clientpositive/smb_mapjoin_2.q.out index e39edcc2fd..480b12eb19 100644 --- ql/src/test/results/clientpositive/smb_mapjoin_2.q.out +++ ql/src/test/results/clientpositive/smb_mapjoin_2.q.out @@ -232,7 +232,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 2220 Basic stats: COMPLETE Column stats: NONE Sorted Merge Bucket Map Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) @@ -458,7 +458,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data 
size: 2080 Basic stats: COMPLETE Column stats: NONE Sorted Merge Bucket Map Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) diff --git ql/src/test/results/clientpositive/smb_mapjoin_3.q.out ql/src/test/results/clientpositive/smb_mapjoin_3.q.out index fa7968da80..ba0a2ce723 100644 --- ql/src/test/results/clientpositive/smb_mapjoin_3.q.out +++ ql/src/test/results/clientpositive/smb_mapjoin_3.q.out @@ -231,7 +231,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 2220 Basic stats: COMPLETE Column stats: NONE Sorted Merge Bucket Map Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) @@ -455,7 +455,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 2060 Basic stats: COMPLETE Column stats: NONE Sorted Merge Bucket Map Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) diff --git ql/src/test/results/clientpositive/smb_mapjoin_46.q.out ql/src/test/results/clientpositive/smb_mapjoin_46.q.out index 098b53a471..9c0c35883b 100644 --- ql/src/test/results/clientpositive/smb_mapjoin_46.q.out +++ ql/src/test/results/clientpositive/smb_mapjoin_46.q.out @@ -1319,7 +1319,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 1 @@ -1418,7 +1418,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 1 @@ -1515,7 +1515,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 1 @@ -1614,7 +1614,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col1 (type: int) 1 _col1 (type: int) diff --git ql/src/test/results/clientpositive/smb_mapjoin_47.q.out ql/src/test/results/clientpositive/smb_mapjoin_47.q.out index a5f3fb952c..c25a8ec111 100644 --- ql/src/test/results/clientpositive/smb_mapjoin_47.q.out +++ ql/src/test/results/clientpositive/smb_mapjoin_47.q.out @@ -1336,7 +1336,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col1 (type: int) 1 _col1 (type: int) @@ -1442,7 +1442,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col1 (type: int) 1 _col1 (type: int) diff --git ql/src/test/results/clientpositive/smb_mapjoin_7.q.out ql/src/test/results/clientpositive/smb_mapjoin_7.q.out index 83033b07c0..ef5cca6209 100644 --- ql/src/test/results/clientpositive/smb_mapjoin_7.q.out +++ ql/src/test/results/clientpositive/smb_mapjoin_7.q.out @@ -633,7 +633,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Sorted Merge Bucket Map Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) diff --git ql/src/test/results/clientpositive/spark/annotate_stats_join.q.out ql/src/test/results/clientpositive/spark/annotate_stats_join.q.out index 667b9b391e..6dd5a99585 100644 --- ql/src/test/results/clientpositive/spark/annotate_stats_join.q.out +++ ql/src/test/results/clientpositive/spark/annotate_stats_join.q.out @@ -1012,7 +1012,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string), _col1 (type: int) 1 _col1 (type: string), _col0 
(type: int) diff --git ql/src/test/results/clientpositive/spark/auto_join14.q.out ql/src/test/results/clientpositive/spark/auto_join14.q.out index 42cee3e56b..ffa7342103 100644 --- ql/src/test/results/clientpositive/spark/auto_join14.q.out +++ ql/src/test/results/clientpositive/spark/auto_join14.q.out @@ -129,4 +129,4 @@ POSTHOOK: query: SELECT sum(hash(dest1_n83.c1,dest1_n83.c2)) FROM dest1_n83 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1_n83 #### A masked pattern was here #### -404554174174 +404540072956 diff --git ql/src/test/results/clientpositive/spark/auto_join18.q.out ql/src/test/results/clientpositive/spark/auto_join18.q.out index 35a01e4d8a..33d15f6d45 100644 --- ql/src/test/results/clientpositive/spark/auto_join18.q.out +++ ql/src/test/results/clientpositive/spark/auto_join18.q.out @@ -99,7 +99,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) diff --git ql/src/test/results/clientpositive/spark/auto_join18_multi_distinct.q.out ql/src/test/results/clientpositive/spark/auto_join18_multi_distinct.q.out index def1de2382..43f6ca5a76 100644 --- ql/src/test/results/clientpositive/spark/auto_join18_multi_distinct.q.out +++ ql/src/test/results/clientpositive/spark/auto_join18_multi_distinct.q.out @@ -101,7 +101,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) diff --git ql/src/test/results/clientpositive/spark/auto_join6.q.out ql/src/test/results/clientpositive/spark/auto_join6.q.out index b17ca86650..f7c910eed9 100644 --- ql/src/test/results/clientpositive/spark/auto_join6.q.out +++ ql/src/test/results/clientpositive/spark/auto_join6.q.out @@ -92,7 +92,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) diff --git ql/src/test/results/clientpositive/spark/auto_join7.q.out ql/src/test/results/clientpositive/spark/auto_join7.q.out index aeec43e04e..cc67d2f1a0 100644 --- ql/src/test/results/clientpositive/spark/auto_join7.q.out +++ ql/src/test/results/clientpositive/spark/auto_join7.q.out @@ -122,7 +122,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 Left Outer Join 0 to 2 keys: 0 _col0 (type: string) diff --git ql/src/test/results/clientpositive/spark/auto_join_filters.q.out ql/src/test/results/clientpositive/spark/auto_join_filters.q.out index 4af239b069..09c167ae8e 100644 --- ql/src/test/results/clientpositive/spark/auto_join_filters.q.out +++ ql/src/test/results/clientpositive/spark/auto_join_filters.q.out @@ -44,6 +44,109 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1_n5 #### A masked pattern was here #### 3080335 +Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Work 'Reducer 2' is a cross product +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: 
Input: default@myinput1_n5 +#### A masked pattern was here #### +4939870 +Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Work 'Reducer 2' is a cross product +PREHOOK: query: EXPLAIN SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Spark + Edges: + Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 1), Map 4 (PARTITION-LEVEL SORT, 1) + Reducer 3 <- Reducer 2 (GROUP, 1) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col1 (type: int) + Execution mode: vectorized + Map 4 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col1 (type: int) + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Join Operator + condition map: + Full Outer Join 0 to 1 + filter predicates: + 0 {(VALUE._col0 > 40)} {(VALUE._col1 > 50)} {(VALUE._col0 = VALUE._col1)} + 1 {(VALUE._col0 > 40)} {(VALUE._col1 > 50)} {(VALUE._col0 = VALUE._col1)} + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 441 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: hash(_col0,_col1,_col2,_col3) (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 441 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col0) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Reducer 3 + Execution mode: vectorized + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator 
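A note on the plan above: the Join Operator carries the ON-clause conditions as per-input "filter predicates" rather than as join keys, both Reduce Output Operators have an empty sort order, and the two maps feed a single reducer — exactly the combination that triggers the "cross product" warning. An ON clause made up only of single-table predicates gives a FULL OUTER JOIN no equality key to shuffle on, and those predicates cannot be rewritten as WHERE filters without changing the result, because WHERE runs after the join and would drop the NULL-padded rows an outer join must keep. A trimmed-down sketch of the same shape, reusing the test's myinput1_n5 table (the simplified predicates are illustrative, not from the test):

-- No a-to-b equality in the ON clause, so there is no shuffle key:
-- the join becomes a cross product and the conditions stay inside
-- the Join Operator as residual filter predicates.
EXPLAIN
SELECT sum(hash(a.key, a.value, b.key, b.value))
FROM myinput1_n5 a
FULL OUTER JOIN myinput1_n5 b
  ON a.key > 40 AND b.key > 40;

-- Not equivalent: a WHERE filter runs after the join and would discard
-- the NULL-padded rows the FULL OUTER JOIN is required to preserve.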
+ limit: -1 + Processor Tree: + ListSink + Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Work 'Reducer 2' is a cross product PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value PREHOOK: type: QUERY @@ -162,6 +265,138 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1_n5 #### A masked pattern was here #### 3080335 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +4939870 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +4939870 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +4939870 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value and a.key=b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value and a.key=b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +4939870 +PREHOOK: query: EXPLAIN SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT 
sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Spark + Edges: + Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 2), Map 4 (PARTITION-LEVEL SORT, 2) + Reducer 3 <- Reducer 2 (GROUP, 1) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: int) + Execution mode: vectorized + Map 4 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int) + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Join Operator + condition map: + Full Outer Join 0 to 1 + filter predicates: + 0 {(KEY.reducesinkkey0 > 40)} {(VALUE._col0 > 50)} {(KEY.reducesinkkey0 = VALUE._col0)} + 1 {(VALUE._col0 > 40)} {(KEY.reducesinkkey0 > 50)} {(VALUE._col0 = KEY.reducesinkkey0)} + keys: + 0 _col0 (type: int) + 1 _col1 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 242 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: hash(_col0,_col1,_col2,_col3) (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 242 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col0) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Reducer 3 + Execution mode: vectorized + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value 
AND b.key > 40 AND b.value > 50 AND b.key = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1_n5 @@ -340,6 +575,16 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1_n5 #### A masked pattern was here #### 4939870 +Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Work 'Reducer 2' is a cross product +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b on a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +4939870 PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1_n5 @@ -448,6 +693,138 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1_n5 #### A masked pattern was here #### 3080335 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +4939870 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +4939870 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +4939870 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value and a.key=b.key AND a.key > 
40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.value = b.value and a.key=b.key AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n5 +#### A masked pattern was here #### +4939870 +PREHOOK: query: EXPLAIN SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Spark + Edges: + Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 2), Map 4 (PARTITION-LEVEL SORT, 2) + Reducer 3 <- Reducer 2 (GROUP, 1) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: int) + Execution mode: vectorized + Map 4 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int) + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Join Operator + condition map: + Full Outer Join 0 to 1 + filter predicates: + 0 {(KEY.reducesinkkey0 > 40)} {(VALUE._col0 > 50)} {(KEY.reducesinkkey0 = VALUE._col0)} + 1 {(VALUE._col0 > 40)} {(KEY.reducesinkkey0 > 50)} {(VALUE._col0 = KEY.reducesinkkey0)} + keys: + 0 _col0 (type: int) + 1 _col1 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 242 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: hash(_col0,_col1,_col2,_col3) (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 242 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col0) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: 
bigint) + Reducer 3 + Execution mode: vectorized + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n5 a FULL OUTER JOIN myinput1_n5 b ON a.key = b.value AND a.key > 40 AND a.value > 50 AND a.key = a.value AND b.key > 40 AND b.value > 50 AND b.key = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1_n5 diff --git ql/src/test/results/clientpositive/spark/auto_join_nulls.q.out ql/src/test/results/clientpositive/spark/auto_join_nulls.q.out index 5f089d7534..a1f669b7e3 100644 --- ql/src/test/results/clientpositive/spark/auto_join_nulls.q.out +++ ql/src/test/results/clientpositive/spark/auto_join_nulls.q.out @@ -152,6 +152,135 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@myinput1_n2 #### A masked pattern was here #### 3079923 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.key = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.key = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n2 +#### A masked pattern was here #### +4543526 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.key = b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.key = b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n2 +#### A masked pattern was here #### +4543526 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.value = b.value +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.value = b.value +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n2 +#### A masked pattern was here #### +4543526 +PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.value = b.value and a.key=b.key +PREHOOK: type: QUERY +PREHOOK: Input: default@myinput1_n2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.value = b.value and a.key=b.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@myinput1_n2 +#### A masked pattern was here #### +4543526 +PREHOOK: query: EXPLAIN SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.key = b.value +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER 
JOIN myinput1_n2 b ON a.key = b.value +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Spark + Edges: + Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 2), Map 4 (PARTITION-LEVEL SORT, 2) + Reducer 3 <- Reducer 2 (GROUP, 1) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: a + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: int) + sort order: + + Map-reduce partition columns: _col0 (type: int) + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: int) + Execution mode: vectorized + Map 4 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Statistics: Num rows: 1 Data size: 160 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int) + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col0 (type: int) + 1 _col1 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 1 Data size: 176 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: hash(_col0,_col1,_col2,_col3) (type: int) + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 176 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(_col0) + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Reducer 3 + Execution mode: vectorized + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + PREHOOK: query: SELECT sum(hash(a.key,a.value,b.key,b.value)) FROM myinput1_n2 a FULL OUTER JOIN myinput1_n2 b ON a.key = b.value PREHOOK: type: QUERY PREHOOK: Input: default@myinput1_n2 diff --git ql/src/test/results/clientpositive/spark/bucket_map_join_tez1.q.out ql/src/test/results/clientpositive/spark/bucket_map_join_tez1.q.out index 18e75aa231..87f0ca84db 100644 --- ql/src/test/results/clientpositive/spark/bucket_map_join_tez1.q.out +++ ql/src/test/results/clientpositive/spark/bucket_map_join_tez1.q.out @@ -289,74 +289,12 @@ POSTHOOK: Input: 
default@tab_part_n9@ds=2008-04-08 0 val_0 val_0 0 val_0 val_0 0 val_0 val_0 -2 val_2 val_2 -4 val_4 val_4 -8 val_8 val_8 -11 val_11 val_11 -15 val_15 val_15 -15 val_15 val_15 -15 val_15 val_15 -15 val_15 val_15 -17 val_17 val_17 -19 val_19 val_19 -20 val_20 val_20 -24 val_24 val_24 -24 val_24 val_24 -24 val_24 val_24 -24 val_24 val_24 -26 val_26 val_26 -26 val_26 val_26 -26 val_26 val_26 -26 val_26 val_26 -28 val_28 val_28 -33 val_33 val_33 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -37 val_37 val_37 -37 val_37 val_37 -37 val_37 val_37 -37 val_37 val_37 -42 val_42 val_42 -42 val_42 val_42 -42 val_42 val_42 -42 val_42 val_42 -44 val_44 val_44 -51 val_51 val_51 -51 val_51 val_51 -51 val_51 val_51 -51 val_51 val_51 -53 val_53 val_53 -57 val_57 val_57 -64 val_64 val_64 -66 val_66 val_66 -77 val_77 val_77 -80 val_80 val_80 -82 val_82 val_82 -84 val_84 val_84 -84 val_84 val_84 -84 val_84 val_84 -84 val_84 val_84 -86 val_86 val_86 -95 val_95 val_95 -95 val_95 val_95 -95 val_95 val_95 -95 val_95 val_95 -97 val_97 val_97 -97 val_97 val_97 -97 val_97 val_97 -97 val_97 val_97 103 val_103 val_103 103 val_103 val_103 103 val_103 val_103 103 val_103 val_103 105 val_105 val_105 +11 val_11 val_11 114 val_114 val_114 116 val_116 val_116 118 val_118 val_118 @@ -398,6 +336,10 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 149 val_149 val_149 149 val_149 val_149 149 val_149 val_149 +15 val_15 val_15 +15 val_15 val_15 +15 val_15 val_15 +15 val_15 val_15 150 val_150 val_150 152 val_152 val_152 152 val_152 val_152 @@ -435,6 +377,7 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 169 val_169 val_169 169 val_169 val_169 169 val_169 val_169 +17 val_17 val_17 170 val_170 val_170 172 val_172 val_172 172 val_172 val_172 @@ -461,10 +404,13 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 187 val_187 val_187 187 val_187 val_187 189 val_189 val_189 +19 val_19 val_19 190 val_190 val_190 192 val_192 val_192 194 val_194 val_194 196 val_196 val_196 +2 val_2 val_2 +20 val_20 val_20 200 val_200 val_200 200 val_200 val_200 200 val_200 val_200 @@ -511,6 +457,10 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 239 val_239 val_239 239 val_239 val_239 239 val_239 val_239 +24 val_24 val_24 +24 val_24 val_24 +24 val_24 val_24 +24 val_24 val_24 242 val_242 val_242 242 val_242 val_242 242 val_242 val_242 @@ -522,6 +472,10 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 255 val_255 val_255 255 val_255 val_255 257 val_257 val_257 +26 val_26 val_26 +26 val_26 val_26 +26 val_26 val_26 +26 val_26 val_26 260 val_260 val_260 262 val_262 val_262 266 val_266 val_266 @@ -551,6 +505,7 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 277 val_277 val_277 277 val_277 val_277 277 val_277 val_277 +28 val_28 val_28 280 val_280 val_280 280 val_280 val_280 280 val_280 val_280 @@ -612,11 +567,21 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 327 val_327 val_327 327 val_327 val_327 327 val_327 val_327 +33 val_33 val_33 332 val_332 val_332 336 val_336 val_336 338 val_338 val_338 341 val_341 val_341 345 val_345 val_345 +35 val_35 val_35 +35 val_35 val_35 +35 val_35 val_35 +35 val_35 val_35 +35 val_35 val_35 +35 val_35 val_35 +35 val_35 val_35 +35 val_35 val_35 +35 val_35 val_35 356 val_356 val_356 365 val_365 val_365 367 val_367 val_367 @@ -632,6 +597,10 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 369 val_369 val_369 369 val_369 val_369 369 val_369 val_369 +37 val_37 val_37 +37 val_37 val_37 
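A note on the bucket_map_join_tez1.q.out hunks around this point: the row set is unchanged and only the printed order moves, with key groups such as 2, 11, 15, 17, 19 and 20 relocating to where they sort as strings ('11' lands between '105' and '114', '2' after '196'). In other words, the golden output is now ordered by the lexicographic form of the key. If a test needed the old numeric ordering back, one hypothetical variant of the query shown below (assuming the key column compares as a string) would be an explicit cast in the ORDER BY:

-- Hypothetical variant pinning numeric order; the CAST is only needed
-- if the key column compares lexicographically.
SELECT a.key, a.value, b.value
FROM tab_n8 a
JOIN tab_part_n9 b ON a.key = b.key
ORDER BY CAST(a.key AS INT), a.value, b.value;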
+37 val_37 val_37 +37 val_37 val_37 374 val_374 val_374 378 val_378 val_378 389 val_389 val_389 @@ -646,6 +615,7 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 396 val_396 val_396 396 val_396 val_396 396 val_396 val_396 +4 val_4 val_4 400 val_400 val_400 402 val_402 val_402 404 val_404 val_404 @@ -683,6 +653,10 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 417 val_417 val_417 417 val_417 val_417 419 val_419 val_419 +42 val_42 val_42 +42 val_42 val_42 +42 val_42 val_42 +42 val_42 val_42 424 val_424 val_424 424 val_424 val_424 424 val_424 val_424 @@ -702,6 +676,7 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 439 val_439 val_439 439 val_439 val_439 439 val_439 val_439 +44 val_44 val_44 444 val_444 val_444 446 val_446 val_446 448 val_448 val_448 @@ -760,6 +735,31 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 493 val_493 val_493 495 val_495 val_495 497 val_497 val_497 +51 val_51 val_51 +51 val_51 val_51 +51 val_51 val_51 +51 val_51 val_51 +53 val_53 val_53 +57 val_57 val_57 +64 val_64 val_64 +66 val_66 val_66 +77 val_77 val_77 +8 val_8 val_8 +80 val_80 val_80 +82 val_82 val_82 +84 val_84 val_84 +84 val_84 val_84 +84 val_84 val_84 +84 val_84 val_84 +86 val_86 val_86 +95 val_95 val_95 +95 val_95 val_95 +95 val_95 val_95 +95 val_95 val_95 +97 val_97 val_97 +97 val_97 val_97 +97 val_97 val_97 +97 val_97 val_97 PREHOOK: query: explain select a.key, a.value, b.value from tab_n8 a join tab_part_n9 b on a.key = b.key order by a.key, a.value, b.value @@ -885,74 +885,12 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 0 val_0 val_0 0 val_0 val_0 0 val_0 val_0 -2 val_2 val_2 -4 val_4 val_4 -8 val_8 val_8 -11 val_11 val_11 -15 val_15 val_15 -15 val_15 val_15 -15 val_15 val_15 -15 val_15 val_15 -17 val_17 val_17 -19 val_19 val_19 -20 val_20 val_20 -24 val_24 val_24 -24 val_24 val_24 -24 val_24 val_24 -24 val_24 val_24 -26 val_26 val_26 -26 val_26 val_26 -26 val_26 val_26 -26 val_26 val_26 -28 val_28 val_28 -33 val_33 val_33 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -37 val_37 val_37 -37 val_37 val_37 -37 val_37 val_37 -37 val_37 val_37 -42 val_42 val_42 -42 val_42 val_42 -42 val_42 val_42 -42 val_42 val_42 -44 val_44 val_44 -51 val_51 val_51 -51 val_51 val_51 -51 val_51 val_51 -51 val_51 val_51 -53 val_53 val_53 -57 val_57 val_57 -64 val_64 val_64 -66 val_66 val_66 -77 val_77 val_77 -80 val_80 val_80 -82 val_82 val_82 -84 val_84 val_84 -84 val_84 val_84 -84 val_84 val_84 -84 val_84 val_84 -86 val_86 val_86 -95 val_95 val_95 -95 val_95 val_95 -95 val_95 val_95 -95 val_95 val_95 -97 val_97 val_97 -97 val_97 val_97 -97 val_97 val_97 -97 val_97 val_97 103 val_103 val_103 103 val_103 val_103 103 val_103 val_103 103 val_103 val_103 105 val_105 val_105 +11 val_11 val_11 114 val_114 val_114 116 val_116 val_116 118 val_118 val_118 @@ -994,6 +932,10 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 149 val_149 val_149 149 val_149 val_149 149 val_149 val_149 +15 val_15 val_15 +15 val_15 val_15 +15 val_15 val_15 +15 val_15 val_15 150 val_150 val_150 152 val_152 val_152 152 val_152 val_152 @@ -1031,6 +973,7 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 169 val_169 val_169 169 val_169 val_169 169 val_169 val_169 +17 val_17 val_17 170 val_170 val_170 172 val_172 val_172 172 val_172 val_172 @@ -1057,10 +1000,13 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 187 val_187 val_187 187 val_187 val_187 189 val_189 val_189 +19 val_19 val_19 190 val_190 
val_190 192 val_192 val_192 194 val_194 val_194 196 val_196 val_196 +2 val_2 val_2 +20 val_20 val_20 200 val_200 val_200 200 val_200 val_200 200 val_200 val_200 @@ -1107,6 +1053,10 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 239 val_239 val_239 239 val_239 val_239 239 val_239 val_239 +24 val_24 val_24 +24 val_24 val_24 +24 val_24 val_24 +24 val_24 val_24 242 val_242 val_242 242 val_242 val_242 242 val_242 val_242 @@ -1118,6 +1068,10 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 255 val_255 val_255 255 val_255 val_255 257 val_257 val_257 +26 val_26 val_26 +26 val_26 val_26 +26 val_26 val_26 +26 val_26 val_26 260 val_260 val_260 262 val_262 val_262 266 val_266 val_266 @@ -1147,6 +1101,7 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 277 val_277 val_277 277 val_277 val_277 277 val_277 val_277 +28 val_28 val_28 280 val_280 val_280 280 val_280 val_280 280 val_280 val_280 @@ -1208,11 +1163,21 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 327 val_327 val_327 327 val_327 val_327 327 val_327 val_327 +33 val_33 val_33 332 val_332 val_332 336 val_336 val_336 338 val_338 val_338 341 val_341 val_341 345 val_345 val_345 +35 val_35 val_35 +35 val_35 val_35 +35 val_35 val_35 +35 val_35 val_35 +35 val_35 val_35 +35 val_35 val_35 +35 val_35 val_35 +35 val_35 val_35 +35 val_35 val_35 356 val_356 val_356 365 val_365 val_365 367 val_367 val_367 @@ -1228,6 +1193,10 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 369 val_369 val_369 369 val_369 val_369 369 val_369 val_369 +37 val_37 val_37 +37 val_37 val_37 +37 val_37 val_37 +37 val_37 val_37 374 val_374 val_374 378 val_378 val_378 389 val_389 val_389 @@ -1242,6 +1211,7 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 396 val_396 val_396 396 val_396 val_396 396 val_396 val_396 +4 val_4 val_4 400 val_400 val_400 402 val_402 val_402 404 val_404 val_404 @@ -1279,6 +1249,10 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 417 val_417 val_417 417 val_417 val_417 419 val_419 val_419 +42 val_42 val_42 +42 val_42 val_42 +42 val_42 val_42 +42 val_42 val_42 424 val_424 val_424 424 val_424 val_424 424 val_424 val_424 @@ -1298,6 +1272,7 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 439 val_439 val_439 439 val_439 val_439 439 val_439 val_439 +44 val_44 val_44 444 val_444 val_444 446 val_446 val_446 448 val_448 val_448 @@ -1356,6 +1331,31 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 493 val_493 val_493 495 val_495 val_495 497 val_497 val_497 +51 val_51 val_51 +51 val_51 val_51 +51 val_51 val_51 +51 val_51 val_51 +53 val_53 val_53 +57 val_57 val_57 +64 val_64 val_64 +66 val_66 val_66 +77 val_77 val_77 +8 val_8 val_8 +80 val_80 val_80 +82 val_82 val_82 +84 val_84 val_84 +84 val_84 val_84 +84 val_84 val_84 +84 val_84 val_84 +86 val_86 val_86 +95 val_95 val_95 +95 val_95 val_95 +95 val_95 val_95 +95 val_95 val_95 +97 val_97 val_97 +97 val_97 val_97 +97 val_97 val_97 +97 val_97 val_97 PREHOOK: query: explain select count(*) from @@ -4848,329 +4848,432 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 0 val_0 val_0 0 val_0 val_0 0 val_0 val_0 -2 val_2 val_2 -4 val_4 val_4 -8 val_8 val_8 +0 val_103 val_103 +0 val_103 val_103 +0 val_103 val_103 +0 val_103 val_103 +0 val_118 val_118 +0 val_118 val_118 +0 val_118 val_118 +0 val_118 val_118 +0 val_125 val_125 +0 val_125 val_125 +0 val_125 val_125 +0 val_125 val_125 +0 val_129 val_129 +0 val_129 val_129 +0 val_129 val_129 +0 val_129 val_129 +0 val_134 val_134 +0 val_134 val_134 +0 val_134 val_134 +0 val_134 val_134 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 
val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_149 val_149 +0 val_149 val_149 +0 val_149 val_149 +0 val_149 val_149 +0 val_15 val_15 +0 val_15 val_15 +0 val_15 val_15 +0 val_15 val_15 +0 val_152 val_152 +0 val_152 val_152 +0 val_152 val_152 +0 val_152 val_152 +0 val_165 val_165 +0 val_165 val_165 +0 val_165 val_165 +0 val_165 val_165 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_172 val_172 +0 val_172 val_172 +0 val_172 val_172 +0 val_172 val_172 +0 val_174 val_174 +0 val_174 val_174 +0 val_174 val_174 +0 val_174 val_174 +0 val_176 val_176 +0 val_176 val_176 +0 val_176 val_176 +0 val_176 val_176 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_200 val_200 +0 val_200 val_200 +0 val_200 val_200 +0 val_200 val_200 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_213 val_213 +0 val_213 val_213 +0 val_213 val_213 +0 val_213 val_213 +0 val_217 val_217 +0 val_217 val_217 +0 val_217 val_217 +0 val_217 val_217 +0 val_219 val_219 +0 val_219 val_219 +0 val_219 val_219 +0 val_219 val_219 +0 val_224 val_224 +0 val_224 val_224 +0 val_224 val_224 +0 val_224 val_224 +0 val_233 val_233 +0 val_233 val_233 +0 val_233 val_233 +0 val_233 val_233 +0 val_237 val_237 +0 val_237 val_237 +0 val_237 val_237 +0 val_237 val_237 +0 val_239 val_239 +0 val_239 val_239 +0 val_239 val_239 +0 val_239 val_239 +0 val_24 val_24 +0 val_24 val_24 +0 val_24 val_24 +0 val_24 val_24 +0 val_242 val_242 +0 val_242 val_242 +0 val_242 val_242 +0 val_242 val_242 +0 val_255 val_255 +0 val_255 val_255 +0 val_255 val_255 +0 val_255 val_255 +0 val_26 val_26 +0 val_26 val_26 +0 val_26 val_26 +0 val_26 val_26 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_280 val_280 +0 val_280 val_280 +0 val_280 val_280 +0 val_280 val_280 +0 val_282 val_282 +0 val_282 val_282 +0 val_282 val_282 +0 val_282 val_282 +0 val_288 val_288 +0 val_288 val_288 +0 val_288 val_288 +0 val_288 val_288 +0 val_307 val_307 +0 val_307 val_307 +0 val_307 val_307 +0 val_307 val_307 +0 val_309 val_309 +0 val_309 val_309 +0 val_309 val_309 +0 val_309 val_309 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_318 val_318 +0 val_318 
val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_321 val_321 +0 val_321 val_321 +0 val_321 val_321 +0 val_321 val_321 +0 val_325 val_325 +0 val_325 val_325 +0 val_325 val_325 +0 val_325 val_325 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_367 val_367 +0 val_367 val_367 +0 val_367 val_367 +0 val_367 val_367 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_37 val_37 +0 val_37 val_37 +0 val_37 val_37 +0 val_37 val_37 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_404 val_404 +0 val_404 val_404 +0 val_404 val_404 +0 val_404 val_404 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_413 val_413 +0 val_413 val_413 +0 val_413 val_413 +0 val_413 val_413 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_42 val_42 +0 val_42 val_42 +0 val_42 val_42 +0 val_42 val_42 +0 val_424 val_424 +0 val_424 val_424 +0 val_424 val_424 +0 val_424 val_424 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_439 val_439 +0 val_439 val_439 +0 val_439 val_439 +0 val_439 val_439 +0 val_459 val_459 +0 val_459 val_459 +0 val_459 val_459 +0 val_459 val_459 +0 val_462 val_462 +0 val_462 val_462 +0 val_462 val_462 +0 val_462 val_462 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_51 val_51 +0 val_51 val_51 +0 val_51 val_51 +0 val_51 val_51 +0 val_84 val_84 +0 val_84 val_84 +0 val_84 val_84 +0 val_84 val_84 +0 val_95 val_95 +0 val_95 val_95 +0 val_95 val_95 +0 val_95 val_95 +0 val_97 val_97 +0 val_97 val_97 +0 val_97 val_97 +0 val_97 val_97 +105 val_105 val_105 11 val_11 val_11 -15 val_15 val_15 -15 val_15 val_15 -15 val_15 val_15 -15 val_15 val_15 -17 val_17 val_17 -19 val_19 val_19 -20 val_20 val_20 -24 val_24 val_24 -24 val_24 val_24 -24 val_24 val_24 -24 val_24 val_24 -26 val_26 val_26 -26 val_26 val_26 -26 val_26 val_26 -26 val_26 val_26 -28 val_28 val_28 -33 val_33 val_33 -35 val_35 val_35 
-35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -37 val_37 val_37 -37 val_37 val_37 -37 val_37 val_37 -37 val_37 val_37 -42 val_42 val_42 -42 val_42 val_42 -42 val_42 val_42 -42 val_42 val_42 -44 val_44 val_44 -51 val_51 val_51 -51 val_51 val_51 -51 val_51 val_51 -51 val_51 val_51 -53 val_53 val_53 -57 val_57 val_57 -64 val_64 val_64 -66 val_66 val_66 -77 val_77 val_77 -80 val_80 val_80 -82 val_82 val_82 -84 val_84 val_84 -84 val_84 val_84 -84 val_84 val_84 -84 val_84 val_84 -86 val_86 val_86 -95 val_95 val_95 -95 val_95 val_95 -95 val_95 val_95 -95 val_95 val_95 -97 val_97 val_97 -97 val_97 val_97 -97 val_97 val_97 -97 val_97 val_97 -103 val_103 val_103 -103 val_103 val_103 -103 val_103 val_103 -103 val_103 val_103 -105 val_105 val_105 114 val_114 val_114 116 val_116 val_116 -118 val_118 val_118 -118 val_118 val_118 -118 val_118 val_118 -118 val_118 val_118 -125 val_125 val_125 -125 val_125 val_125 -125 val_125 val_125 -125 val_125 val_125 -129 val_129 val_129 -129 val_129 val_129 -129 val_129 val_129 -129 val_129 val_129 -134 val_134 val_134 -134 val_134 val_134 -134 val_134 val_134 -134 val_134 val_134 136 val_136 val_136 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 143 val_143 val_143 145 val_145 val_145 -149 val_149 val_149 -149 val_149 val_149 -149 val_149 val_149 -149 val_149 val_149 150 val_150 val_150 -152 val_152 val_152 -152 val_152 val_152 -152 val_152 val_152 -152 val_152 val_152 156 val_156 val_156 158 val_158 val_158 163 val_163 val_163 -165 val_165 val_165 -165 val_165 val_165 -165 val_165 val_165 -165 val_165 val_165 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 +17 val_17 val_17 170 val_170 val_170 -172 val_172 val_172 -172 val_172 val_172 -172 val_172 val_172 -172 val_172 val_172 -174 val_174 val_174 -174 val_174 val_174 -174 val_174 val_174 -174 val_174 val_174 -176 val_176 val_176 -176 val_176 val_176 -176 val_176 val_176 -176 val_176 val_176 178 val_178 val_178 181 val_181 val_181 183 val_183 val_183 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 189 val_189 val_189 +19 val_19 val_19 190 val_190 val_190 192 val_192 val_192 194 val_194 val_194 196 val_196 val_196 -200 val_200 val_200 -200 val_200 val_200 -200 val_200 val_200 -200 val_200 val_200 +2 val_2 val_2 +20 val_20 val_20 202 val_202 val_202 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -213 val_213 val_213 -213 val_213 val_213 -213 val_213 val_213 -213 val_213 val_213 -217 val_217 
val_217 -217 val_217 val_217 -217 val_217 val_217 -217 val_217 val_217 -219 val_219 val_219 -219 val_219 val_219 -219 val_219 val_219 -219 val_219 val_219 222 val_222 val_222 -224 val_224 val_224 -224 val_224 val_224 -224 val_224 val_224 -224 val_224 val_224 226 val_226 val_226 228 val_228 val_228 -233 val_233 val_233 -233 val_233 val_233 -233 val_233 val_233 -233 val_233 val_233 235 val_235 val_235 -237 val_237 val_237 -237 val_237 val_237 -237 val_237 val_237 -237 val_237 val_237 -239 val_239 val_239 -239 val_239 val_239 -239 val_239 val_239 -239 val_239 val_239 -242 val_242 val_242 -242 val_242 val_242 -242 val_242 val_242 -242 val_242 val_242 244 val_244 val_244 248 val_248 val_248 -255 val_255 val_255 -255 val_255 val_255 -255 val_255 val_255 -255 val_255 val_255 257 val_257 val_257 260 val_260 val_260 262 val_262 val_262 266 val_266 val_266 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 275 val_275 val_275 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -280 val_280 val_280 -280 val_280 val_280 -280 val_280 val_280 -280 val_280 val_280 -282 val_282 val_282 -282 val_282 val_282 -282 val_282 val_282 -282 val_282 val_282 +28 val_28 val_28 284 val_284 val_284 286 val_286 val_286 -288 val_288 val_288 -288 val_288 val_288 -288 val_288 val_288 -288 val_288 val_288 291 val_291 val_291 305 val_305 val_305 -307 val_307 val_307 -307 val_307 val_307 -307 val_307 val_307 -307 val_307 val_307 -309 val_309 val_309 -309 val_309 val_309 -309 val_309 val_309 -309 val_309 val_309 310 val_310 val_310 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -321 val_321 val_321 -321 val_321 val_321 -321 val_321 val_321 -321 val_321 val_321 323 val_323 val_323 -325 val_325 val_325 -325 val_325 val_325 -325 val_325 val_325 -325 val_325 val_325 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 +33 val_33 val_33 332 val_332 val_332 336 val_336 val_336 338 val_338 val_338 @@ -5178,147 +5281,44 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 345 val_345 val_345 356 val_356 val_356 365 val_365 val_365 -367 val_367 val_367 -367 val_367 val_367 -367 val_367 val_367 -367 val_367 val_367 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 374 val_374 val_374 378 val_378 val_378 389 val_389 val_389 392 val_392 val_392 394 val_394 val_394 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 +4 val_4 val_4 400 val_400 val_400 402 val_402 val_402 -404 val_404 val_404 -404 
val_404 val_404 -404 val_404 val_404 -404 val_404 val_404 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 411 val_411 val_411 -413 val_413 val_413 -413 val_413 val_413 -413 val_413 val_413 -413 val_413 val_413 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 419 val_419 val_419 -424 val_424 val_424 -424 val_424 val_424 -424 val_424 val_424 -424 val_424 val_424 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 435 val_435 val_435 437 val_437 val_437 -439 val_439 val_439 -439 val_439 val_439 -439 val_439 val_439 -439 val_439 val_439 +44 val_44 val_44 444 val_444 val_444 446 val_446 val_446 448 val_448 val_448 453 val_453 val_453 455 val_455 val_455 457 val_457 val_457 -459 val_459 val_459 -459 val_459 val_459 -459 val_459 val_459 -459 val_459 val_459 460 val_460 val_460 -462 val_462 val_462 -462 val_462 val_462 -462 val_462 val_462 -462 val_462 val_462 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 475 val_475 val_475 477 val_477 val_477 479 val_479 val_479 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 482 val_482 val_482 484 val_484 val_484 491 val_491 val_491 493 val_493 val_493 495 val_495 val_495 497 val_497 val_497 +53 val_53 val_53 +57 val_57 val_57 +64 val_64 val_64 +66 val_66 val_66 +77 val_77 val_77 +8 val_8 val_8 +80 val_80 val_80 +82 val_82 val_82 +86 val_86 val_86 PREHOOK: query: explain select a.key, a.value, b.value from tab_n8 a join tab_part_n9 b on a.key = b.key and a.value = b.value PREHOOK: type: QUERY @@ -5431,329 +5431,432 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 0 val_0 val_0 0 val_0 val_0 0 val_0 val_0 -2 val_2 val_2 -4 val_4 val_4 -8 val_8 val_8 -11 val_11 val_11 -15 val_15 val_15 -15 val_15 val_15 -15 val_15 val_15 -15 val_15 val_15 -17 val_17 val_17 -19 val_19 val_19 -20 val_20 val_20 -24 val_24 val_24 -24 val_24 val_24 -24 val_24 val_24 -24 val_24 val_24 -26 val_26 val_26 -26 val_26 val_26 -26 val_26 val_26 -26 val_26 val_26 -28 val_28 val_28 -33 val_33 val_33 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -37 val_37 val_37 -37 val_37 val_37 -37 val_37 val_37 -37 val_37 val_37 -42 val_42 val_42 -42 val_42 val_42 -42 val_42 val_42 -42 val_42 val_42 -44 val_44 val_44 -51 val_51 val_51 -51 val_51 val_51 -51 val_51 val_51 -51 val_51 val_51 -53 val_53 val_53 -57 val_57 val_57 -64 
val_64 val_64 -66 val_66 val_66 -77 val_77 val_77 -80 val_80 val_80 -82 val_82 val_82 -84 val_84 val_84 -84 val_84 val_84 -84 val_84 val_84 -84 val_84 val_84 -86 val_86 val_86 -95 val_95 val_95 -95 val_95 val_95 -95 val_95 val_95 -95 val_95 val_95 -97 val_97 val_97 -97 val_97 val_97 -97 val_97 val_97 -97 val_97 val_97 -103 val_103 val_103 -103 val_103 val_103 -103 val_103 val_103 -103 val_103 val_103 +0 val_103 val_103 +0 val_103 val_103 +0 val_103 val_103 +0 val_103 val_103 +0 val_118 val_118 +0 val_118 val_118 +0 val_118 val_118 +0 val_118 val_118 +0 val_125 val_125 +0 val_125 val_125 +0 val_125 val_125 +0 val_125 val_125 +0 val_129 val_129 +0 val_129 val_129 +0 val_129 val_129 +0 val_129 val_129 +0 val_134 val_134 +0 val_134 val_134 +0 val_134 val_134 +0 val_134 val_134 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_149 val_149 +0 val_149 val_149 +0 val_149 val_149 +0 val_149 val_149 +0 val_15 val_15 +0 val_15 val_15 +0 val_15 val_15 +0 val_15 val_15 +0 val_152 val_152 +0 val_152 val_152 +0 val_152 val_152 +0 val_152 val_152 +0 val_165 val_165 +0 val_165 val_165 +0 val_165 val_165 +0 val_165 val_165 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_172 val_172 +0 val_172 val_172 +0 val_172 val_172 +0 val_172 val_172 +0 val_174 val_174 +0 val_174 val_174 +0 val_174 val_174 +0 val_174 val_174 +0 val_176 val_176 +0 val_176 val_176 +0 val_176 val_176 +0 val_176 val_176 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_200 val_200 +0 val_200 val_200 +0 val_200 val_200 +0 val_200 val_200 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_213 val_213 +0 val_213 val_213 +0 val_213 val_213 +0 val_213 val_213 +0 val_217 val_217 +0 val_217 val_217 +0 val_217 val_217 +0 val_217 val_217 +0 val_219 val_219 +0 val_219 val_219 +0 val_219 val_219 +0 val_219 val_219 +0 val_224 val_224 +0 val_224 val_224 +0 val_224 val_224 +0 val_224 val_224 +0 val_233 val_233 +0 val_233 val_233 +0 val_233 val_233 +0 val_233 val_233 +0 val_237 val_237 +0 val_237 val_237 +0 val_237 val_237 +0 val_237 val_237 +0 val_239 val_239 +0 val_239 val_239 +0 val_239 val_239 +0 val_239 val_239 +0 val_24 val_24 +0 val_24 val_24 +0 val_24 val_24 +0 val_24 val_24 +0 val_242 val_242 +0 val_242 val_242 +0 val_242 val_242 +0 val_242 val_242 +0 val_255 val_255 +0 val_255 val_255 +0 val_255 val_255 +0 val_255 val_255 +0 val_26 val_26 +0 val_26 val_26 +0 val_26 val_26 +0 val_26 val_26 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 
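Unlike the reorder-only hunks earlier in this file, the two large hunks at @@ -4848 and @@ -5431 (the one in progress here) change the row set itself: key 0 is now paired with value groups such as val_103, val_118 and val_138 while the old per-key matched groups drop out, so the query these rows belong to — it sits earlier in the .q.out file, outside this excerpt — now produces genuinely different matches. This is also why most verification queries in this section reduce the join result to a single sum(hash(...)) value: the checksum is insensitive to row order, so golden values only change when the result set itself changes. A minimal sketch of that pattern, reusing the myinput1_n5 table from the earlier tests:

-- sum() is commutative, so this single BIGINT checksum is stable under
-- any reordering of the join output; hash() is Hive's built-in hash UDF.
SELECT sum(hash(a.key, a.value, b.key, b.value))
FROM myinput1_n5 a
FULL OUTER JOIN myinput1_n5 b ON a.key = b.key;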
+0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_280 val_280 +0 val_280 val_280 +0 val_280 val_280 +0 val_280 val_280 +0 val_282 val_282 +0 val_282 val_282 +0 val_282 val_282 +0 val_282 val_282 +0 val_288 val_288 +0 val_288 val_288 +0 val_288 val_288 +0 val_288 val_288 +0 val_307 val_307 +0 val_307 val_307 +0 val_307 val_307 +0 val_307 val_307 +0 val_309 val_309 +0 val_309 val_309 +0 val_309 val_309 +0 val_309 val_309 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_321 val_321 +0 val_321 val_321 +0 val_321 val_321 +0 val_321 val_321 +0 val_325 val_325 +0 val_325 val_325 +0 val_325 val_325 +0 val_325 val_325 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_367 val_367 +0 val_367 val_367 +0 val_367 val_367 +0 val_367 val_367 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_37 val_37 +0 val_37 val_37 +0 val_37 val_37 +0 val_37 val_37 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_404 val_404 +0 val_404 val_404 +0 val_404 val_404 +0 val_404 val_404 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_413 val_413 +0 val_413 val_413 +0 val_413 val_413 +0 val_413 val_413 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_42 val_42 +0 val_42 val_42 +0 val_42 val_42 +0 val_42 val_42 +0 val_424 val_424 +0 val_424 val_424 +0 val_424 val_424 +0 val_424 val_424 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_439 val_439 +0 val_439 val_439 +0 val_439 val_439 +0 val_439 val_439 +0 val_459 val_459 +0 val_459 val_459 +0 val_459 val_459 +0 val_459 val_459 +0 val_462 val_462 +0 val_462 val_462 +0 val_462 val_462 +0 val_462 val_462 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 
val_468 +0 val_468 val_468 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_51 val_51 +0 val_51 val_51 +0 val_51 val_51 +0 val_51 val_51 +0 val_84 val_84 +0 val_84 val_84 +0 val_84 val_84 +0 val_84 val_84 +0 val_95 val_95 +0 val_95 val_95 +0 val_95 val_95 +0 val_95 val_95 +0 val_97 val_97 +0 val_97 val_97 +0 val_97 val_97 +0 val_97 val_97 105 val_105 val_105 +11 val_11 val_11 114 val_114 val_114 116 val_116 val_116 -118 val_118 val_118 -118 val_118 val_118 -118 val_118 val_118 -118 val_118 val_118 -125 val_125 val_125 -125 val_125 val_125 -125 val_125 val_125 -125 val_125 val_125 -129 val_129 val_129 -129 val_129 val_129 -129 val_129 val_129 -129 val_129 val_129 -134 val_134 val_134 -134 val_134 val_134 -134 val_134 val_134 -134 val_134 val_134 136 val_136 val_136 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 143 val_143 val_143 145 val_145 val_145 -149 val_149 val_149 -149 val_149 val_149 -149 val_149 val_149 -149 val_149 val_149 150 val_150 val_150 -152 val_152 val_152 -152 val_152 val_152 -152 val_152 val_152 -152 val_152 val_152 156 val_156 val_156 158 val_158 val_158 163 val_163 val_163 -165 val_165 val_165 -165 val_165 val_165 -165 val_165 val_165 -165 val_165 val_165 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 +17 val_17 val_17 170 val_170 val_170 -172 val_172 val_172 -172 val_172 val_172 -172 val_172 val_172 -172 val_172 val_172 -174 val_174 val_174 -174 val_174 val_174 -174 val_174 val_174 -174 val_174 val_174 -176 val_176 val_176 -176 val_176 val_176 -176 val_176 val_176 -176 val_176 val_176 178 val_178 val_178 181 val_181 val_181 183 val_183 val_183 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 189 val_189 val_189 +19 val_19 val_19 190 val_190 val_190 192 val_192 val_192 194 val_194 val_194 196 val_196 val_196 -200 val_200 val_200 -200 val_200 val_200 -200 val_200 val_200 -200 val_200 val_200 +2 val_2 val_2 +20 val_20 val_20 202 val_202 val_202 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -213 val_213 val_213 -213 val_213 val_213 -213 val_213 val_213 -213 val_213 val_213 -217 val_217 val_217 -217 val_217 val_217 -217 val_217 val_217 -217 val_217 val_217 -219 val_219 val_219 -219 val_219 val_219 -219 val_219 val_219 -219 val_219 val_219 222 val_222 val_222 -224 val_224 val_224 -224 val_224 val_224 -224 val_224 val_224 -224 val_224 val_224 226 val_226 val_226 228 val_228 val_228 -233 val_233 val_233 -233 val_233 
val_233 -233 val_233 val_233 -233 val_233 val_233 235 val_235 val_235 -237 val_237 val_237 -237 val_237 val_237 -237 val_237 val_237 -237 val_237 val_237 -239 val_239 val_239 -239 val_239 val_239 -239 val_239 val_239 -239 val_239 val_239 -242 val_242 val_242 -242 val_242 val_242 -242 val_242 val_242 -242 val_242 val_242 244 val_244 val_244 248 val_248 val_248 -255 val_255 val_255 -255 val_255 val_255 -255 val_255 val_255 -255 val_255 val_255 257 val_257 val_257 260 val_260 val_260 262 val_262 val_262 266 val_266 val_266 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 275 val_275 val_275 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -280 val_280 val_280 -280 val_280 val_280 -280 val_280 val_280 -280 val_280 val_280 -282 val_282 val_282 -282 val_282 val_282 -282 val_282 val_282 -282 val_282 val_282 -284 val_284 val_284 -286 val_286 val_286 -288 val_288 val_288 -288 val_288 val_288 -288 val_288 val_288 -288 val_288 val_288 -291 val_291 val_291 -305 val_305 val_305 -307 val_307 val_307 -307 val_307 val_307 -307 val_307 val_307 -307 val_307 val_307 -309 val_309 val_309 -309 val_309 val_309 -309 val_309 val_309 -309 val_309 val_309 -310 val_310 val_310 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -321 val_321 val_321 -321 val_321 val_321 -321 val_321 val_321 -321 val_321 val_321 +28 val_28 val_28 +284 val_284 val_284 +286 val_286 val_286 +291 val_291 val_291 +305 val_305 val_305 +310 val_310 val_310 323 val_323 val_323 -325 val_325 val_325 -325 val_325 val_325 -325 val_325 val_325 -325 val_325 val_325 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 +33 val_33 val_33 332 val_332 val_332 336 val_336 val_336 338 val_338 val_338 @@ -5761,147 +5864,44 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 345 val_345 val_345 356 val_356 val_356 365 val_365 val_365 -367 val_367 val_367 -367 val_367 val_367 -367 val_367 val_367 -367 val_367 val_367 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 374 val_374 val_374 378 val_378 val_378 389 val_389 val_389 392 val_392 val_392 394 val_394 val_394 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 +4 val_4 val_4 400 val_400 val_400 402 val_402 val_402 -404 val_404 val_404 -404 val_404 val_404 -404 val_404 val_404 -404 val_404 val_404 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 
val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 411 val_411 val_411 -413 val_413 val_413 -413 val_413 val_413 -413 val_413 val_413 -413 val_413 val_413 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 419 val_419 val_419 -424 val_424 val_424 -424 val_424 val_424 -424 val_424 val_424 -424 val_424 val_424 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 435 val_435 val_435 437 val_437 val_437 -439 val_439 val_439 -439 val_439 val_439 -439 val_439 val_439 -439 val_439 val_439 +44 val_44 val_44 444 val_444 val_444 446 val_446 val_446 448 val_448 val_448 453 val_453 val_453 455 val_455 val_455 457 val_457 val_457 -459 val_459 val_459 -459 val_459 val_459 -459 val_459 val_459 -459 val_459 val_459 460 val_460 val_460 -462 val_462 val_462 -462 val_462 val_462 -462 val_462 val_462 -462 val_462 val_462 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 475 val_475 val_475 477 val_477 val_477 479 val_479 val_479 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 482 val_482 val_482 484 val_484 val_484 491 val_491 val_491 493 val_493 val_493 495 val_495 val_495 497 val_497 val_497 +53 val_53 val_53 +57 val_57 val_57 +64 val_64 val_64 +66 val_66 val_66 +77 val_77 val_77 +8 val_8 val_8 +80 val_80 val_80 +82 val_82 val_82 +86 val_86 val_86 PREHOOK: query: CREATE TABLE tab2_n4(key int, value string) PARTITIONED BY(ds STRING) STORED AS TEXTFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default @@ -6041,338 +6041,441 @@ POSTHOOK: Input: default@tab2_n4@ds=2008-04-08 POSTHOOK: Input: default@tab_part_n9 POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 #### A masked pattern was here #### -0 val_0 val_0 -0 val_0 val_0 -0 val_0 val_0 -0 val_0 val_0 -0 val_0 val_0 -0 val_0 val_0 -0 val_0 val_0 -0 val_0 val_0 -0 val_0 val_0 -2 val_2 val_2 -4 val_4 val_4 -8 val_8 val_8 -11 val_11 val_11 -15 val_15 val_15 -15 val_15 val_15 -15 val_15 val_15 -15 val_15 val_15 -17 val_17 val_17 -19 val_19 val_19 -20 val_20 val_20 -24 val_24 val_24 -24 val_24 val_24 -24 val_24 val_24 -24 val_24 val_24 -26 val_26 val_26 -26 val_26 val_26 -26 val_26 val_26 -26 val_26 val_26 -28 val_28 val_28 -33 val_33 val_33 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -37 val_37 val_37 -37 val_37 val_37 -37 val_37 val_37 -37 val_37 val_37 -42 val_42 val_42 -42 val_42 val_42 -42 val_42 val_42 -42 val_42 val_42 -44 val_44 val_44 -51 val_51 val_51 -51 val_51 val_51 -51 val_51 val_51 -51 val_51 val_51 -53 val_53 val_53 -57 
val_57 val_57 -64 val_64 val_64 -66 val_66 val_66 -77 val_77 val_77 -80 val_80 val_80 -82 val_82 val_82 -84 val_84 val_84 -84 val_84 val_84 -84 val_84 val_84 -84 val_84 val_84 -86 val_86 val_86 -95 val_95 val_95 -95 val_95 val_95 -95 val_95 val_95 -95 val_95 val_95 -97 val_97 val_97 -97 val_97 val_97 -97 val_97 val_97 -97 val_97 val_97 -103 val_103 val_103 -103 val_103 val_103 -103 val_103 val_103 -103 val_103 val_103 -105 val_105 val_105 -114 val_114 val_114 -116 val_116 val_116 -118 val_118 val_118 -118 val_118 val_118 -118 val_118 val_118 -118 val_118 val_118 -125 val_125 val_125 -125 val_125 val_125 -125 val_125 val_125 -125 val_125 val_125 -129 val_129 val_129 -129 val_129 val_129 -129 val_129 val_129 -129 val_129 val_129 -134 val_134 val_134 -134 val_134 val_134 -134 val_134 val_134 -134 val_134 val_134 -136 val_136 val_136 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 +0 val_0 val_0 +0 val_0 val_0 +0 val_0 val_0 +0 val_0 val_0 +0 val_0 val_0 +0 val_0 val_0 +0 val_0 val_0 +0 val_0 val_0 +0 val_0 val_0 +0 val_103 val_103 +0 val_103 val_103 +0 val_103 val_103 +0 val_103 val_103 +0 val_118 val_118 +0 val_118 val_118 +0 val_118 val_118 +0 val_118 val_118 +0 val_125 val_125 +0 val_125 val_125 +0 val_125 val_125 +0 val_125 val_125 +0 val_129 val_129 +0 val_129 val_129 +0 val_129 val_129 +0 val_129 val_129 +0 val_134 val_134 +0 val_134 val_134 +0 val_134 val_134 +0 val_134 val_134 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_149 val_149 +0 val_149 val_149 +0 val_149 val_149 +0 val_149 val_149 +0 val_15 val_15 +0 val_15 val_15 +0 val_15 val_15 +0 val_15 val_15 +0 val_152 val_152 +0 val_152 val_152 +0 val_152 val_152 +0 val_152 val_152 +0 val_165 val_165 +0 val_165 val_165 +0 val_165 val_165 +0 val_165 val_165 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_172 val_172 +0 val_172 val_172 +0 val_172 val_172 +0 val_172 val_172 +0 val_174 val_174 +0 val_174 val_174 +0 val_174 val_174 +0 val_174 val_174 +0 val_176 val_176 +0 val_176 val_176 +0 val_176 val_176 +0 val_176 val_176 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_200 val_200 +0 val_200 val_200 +0 val_200 val_200 +0 val_200 val_200 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_213 val_213 +0 val_213 val_213 +0 val_213 val_213 +0 val_213 val_213 +0 val_217 val_217 +0 val_217 val_217 +0 val_217 val_217 +0 
val_217 val_217 +0 val_219 val_219 +0 val_219 val_219 +0 val_219 val_219 +0 val_219 val_219 +0 val_224 val_224 +0 val_224 val_224 +0 val_224 val_224 +0 val_224 val_224 +0 val_233 val_233 +0 val_233 val_233 +0 val_233 val_233 +0 val_233 val_233 +0 val_237 val_237 +0 val_237 val_237 +0 val_237 val_237 +0 val_237 val_237 +0 val_239 val_239 +0 val_239 val_239 +0 val_239 val_239 +0 val_239 val_239 +0 val_24 val_24 +0 val_24 val_24 +0 val_24 val_24 +0 val_24 val_24 +0 val_242 val_242 +0 val_242 val_242 +0 val_242 val_242 +0 val_242 val_242 +0 val_255 val_255 +0 val_255 val_255 +0 val_255 val_255 +0 val_255 val_255 +0 val_26 val_26 +0 val_26 val_26 +0 val_26 val_26 +0 val_26 val_26 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_280 val_280 +0 val_280 val_280 +0 val_280 val_280 +0 val_280 val_280 +0 val_282 val_282 +0 val_282 val_282 +0 val_282 val_282 +0 val_282 val_282 +0 val_288 val_288 +0 val_288 val_288 +0 val_288 val_288 +0 val_288 val_288 +0 val_307 val_307 +0 val_307 val_307 +0 val_307 val_307 +0 val_307 val_307 +0 val_309 val_309 +0 val_309 val_309 +0 val_309 val_309 +0 val_309 val_309 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_321 val_321 +0 val_321 val_321 +0 val_321 val_321 +0 val_321 val_321 +0 val_325 val_325 +0 val_325 val_325 +0 val_325 val_325 +0 val_325 val_325 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_367 val_367 +0 val_367 val_367 +0 val_367 val_367 +0 val_367 val_367 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_37 val_37 +0 val_37 val_37 +0 val_37 val_37 +0 val_37 val_37 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_404 val_404 +0 val_404 val_404 +0 val_404 val_404 +0 val_404 val_404 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_413 val_413 +0 val_413 val_413 +0 val_413 val_413 +0 val_413 val_413 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_42 val_42 +0 val_42 val_42 +0 val_42 val_42 +0 val_42 val_42 +0 val_424 
val_424 +0 val_424 val_424 +0 val_424 val_424 +0 val_424 val_424 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_439 val_439 +0 val_439 val_439 +0 val_439 val_439 +0 val_439 val_439 +0 val_459 val_459 +0 val_459 val_459 +0 val_459 val_459 +0 val_459 val_459 +0 val_462 val_462 +0 val_462 val_462 +0 val_462 val_462 +0 val_462 val_462 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_51 val_51 +0 val_51 val_51 +0 val_51 val_51 +0 val_51 val_51 +0 val_84 val_84 +0 val_84 val_84 +0 val_84 val_84 +0 val_84 val_84 +0 val_95 val_95 +0 val_95 val_95 +0 val_95 val_95 +0 val_95 val_95 +0 val_97 val_97 +0 val_97 val_97 +0 val_97 val_97 +0 val_97 val_97 +105 val_105 val_105 +11 val_11 val_11 +114 val_114 val_114 +116 val_116 val_116 +136 val_136 val_136 143 val_143 val_143 145 val_145 val_145 -149 val_149 val_149 -149 val_149 val_149 -149 val_149 val_149 -149 val_149 val_149 150 val_150 val_150 -152 val_152 val_152 -152 val_152 val_152 -152 val_152 val_152 -152 val_152 val_152 156 val_156 val_156 158 val_158 val_158 163 val_163 val_163 -165 val_165 val_165 -165 val_165 val_165 -165 val_165 val_165 -165 val_165 val_165 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 +17 val_17 val_17 170 val_170 val_170 -172 val_172 val_172 -172 val_172 val_172 -172 val_172 val_172 -172 val_172 val_172 -174 val_174 val_174 -174 val_174 val_174 -174 val_174 val_174 -174 val_174 val_174 -176 val_176 val_176 -176 val_176 val_176 -176 val_176 val_176 -176 val_176 val_176 178 val_178 val_178 181 val_181 val_181 183 val_183 val_183 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 189 val_189 val_189 +19 val_19 val_19 190 val_190 val_190 192 val_192 val_192 194 val_194 val_194 196 val_196 val_196 -200 val_200 val_200 -200 val_200 val_200 -200 val_200 val_200 -200 val_200 val_200 +2 val_2 val_2 +20 val_20 val_20 202 val_202 val_202 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -213 val_213 val_213 -213 val_213 val_213 -213 val_213 val_213 -213 val_213 val_213 -217 val_217 val_217 -217 val_217 val_217 -217 val_217 val_217 -217 val_217 val_217 -219 val_219 val_219 
-219 val_219 val_219 -219 val_219 val_219 -219 val_219 val_219 222 val_222 val_222 -224 val_224 val_224 -224 val_224 val_224 -224 val_224 val_224 -224 val_224 val_224 226 val_226 val_226 228 val_228 val_228 -233 val_233 val_233 -233 val_233 val_233 -233 val_233 val_233 -233 val_233 val_233 235 val_235 val_235 -237 val_237 val_237 -237 val_237 val_237 -237 val_237 val_237 -237 val_237 val_237 -239 val_239 val_239 -239 val_239 val_239 -239 val_239 val_239 -239 val_239 val_239 -242 val_242 val_242 -242 val_242 val_242 -242 val_242 val_242 -242 val_242 val_242 244 val_244 val_244 248 val_248 val_248 -255 val_255 val_255 -255 val_255 val_255 -255 val_255 val_255 -255 val_255 val_255 257 val_257 val_257 260 val_260 val_260 262 val_262 val_262 266 val_266 val_266 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 275 val_275 val_275 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -280 val_280 val_280 -280 val_280 val_280 -280 val_280 val_280 -280 val_280 val_280 -282 val_282 val_282 -282 val_282 val_282 -282 val_282 val_282 -282 val_282 val_282 +28 val_28 val_28 284 val_284 val_284 286 val_286 val_286 -288 val_288 val_288 -288 val_288 val_288 -288 val_288 val_288 -288 val_288 val_288 291 val_291 val_291 305 val_305 val_305 -307 val_307 val_307 -307 val_307 val_307 -307 val_307 val_307 -307 val_307 val_307 -309 val_309 val_309 -309 val_309 val_309 -309 val_309 val_309 -309 val_309 val_309 310 val_310 val_310 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -321 val_321 val_321 -321 val_321 val_321 -321 val_321 val_321 -321 val_321 val_321 323 val_323 val_323 -325 val_325 val_325 -325 val_325 val_325 -325 val_325 val_325 -325 val_325 val_325 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 +33 val_33 val_33 332 val_332 val_332 336 val_336 val_336 338 val_338 val_338 @@ -6380,147 +6483,44 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 345 val_345 val_345 356 val_356 val_356 365 val_365 val_365 -367 val_367 val_367 -367 val_367 val_367 -367 val_367 val_367 -367 val_367 val_367 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -374 val_374 val_374 -378 val_378 val_378 -389 val_389 val_389 -392 val_392 val_392 -394 val_394 val_394 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -400 val_400 val_400 -402 val_402 val_402 -404 val_404 val_404 -404 val_404 val_404 -404 val_404 val_404 -404 val_404 val_404 -406 val_406 val_406 -406 val_406 val_406 
-406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 +374 val_374 val_374 +378 val_378 val_378 +389 val_389 val_389 +392 val_392 val_392 +394 val_394 val_394 +4 val_4 val_4 +400 val_400 val_400 +402 val_402 val_402 411 val_411 val_411 -413 val_413 val_413 -413 val_413 val_413 -413 val_413 val_413 -413 val_413 val_413 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 419 val_419 val_419 -424 val_424 val_424 -424 val_424 val_424 -424 val_424 val_424 -424 val_424 val_424 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 435 val_435 val_435 437 val_437 val_437 -439 val_439 val_439 -439 val_439 val_439 -439 val_439 val_439 -439 val_439 val_439 +44 val_44 val_44 444 val_444 val_444 446 val_446 val_446 448 val_448 val_448 453 val_453 val_453 455 val_455 val_455 457 val_457 val_457 -459 val_459 val_459 -459 val_459 val_459 -459 val_459 val_459 -459 val_459 val_459 460 val_460 val_460 -462 val_462 val_462 -462 val_462 val_462 -462 val_462 val_462 -462 val_462 val_462 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 475 val_475 val_475 477 val_477 val_477 479 val_479 val_479 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 482 val_482 val_482 484 val_484 val_484 491 val_491 val_491 493 val_493 val_493 495 val_495 val_495 497 val_497 val_497 +53 val_53 val_53 +57 val_57 val_57 +64 val_64 val_64 +66 val_66 val_66 +77 val_77 val_77 +8 val_8 val_8 +80 val_80 val_80 +82 val_82 val_82 +86 val_86 val_86 PREHOOK: query: explain select a.key, a.value, b.value from tab2_n4 a join tab_part_n9 b on a.key = b.key and a.value = b.value PREHOOK: type: QUERY @@ -6633,329 +6633,432 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 0 val_0 val_0 0 val_0 val_0 0 val_0 val_0 -2 val_2 val_2 -4 val_4 val_4 -8 val_8 val_8 -11 val_11 val_11 -15 val_15 val_15 -15 val_15 val_15 -15 val_15 val_15 -15 val_15 val_15 -17 val_17 val_17 -19 val_19 val_19 -20 val_20 val_20 -24 val_24 val_24 -24 val_24 val_24 -24 val_24 val_24 -24 val_24 val_24 -26 val_26 val_26 -26 val_26 val_26 -26 val_26 val_26 -26 val_26 val_26 -28 val_28 val_28 -33 val_33 val_33 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -35 val_35 val_35 -37 val_37 val_37 -37 val_37 val_37 -37 val_37 val_37 -37 val_37 val_37 -42 val_42 val_42 -42 val_42 val_42 -42 val_42 val_42 -42 val_42 val_42 -44 val_44 val_44 -51 val_51 val_51 -51 val_51 val_51 -51 val_51 
val_51 -51 val_51 val_51 -53 val_53 val_53 -57 val_57 val_57 -64 val_64 val_64 -66 val_66 val_66 -77 val_77 val_77 -80 val_80 val_80 -82 val_82 val_82 -84 val_84 val_84 -84 val_84 val_84 -84 val_84 val_84 -84 val_84 val_84 -86 val_86 val_86 -95 val_95 val_95 -95 val_95 val_95 -95 val_95 val_95 -95 val_95 val_95 -97 val_97 val_97 -97 val_97 val_97 -97 val_97 val_97 -97 val_97 val_97 -103 val_103 val_103 -103 val_103 val_103 -103 val_103 val_103 -103 val_103 val_103 +0 val_103 val_103 +0 val_103 val_103 +0 val_103 val_103 +0 val_103 val_103 +0 val_118 val_118 +0 val_118 val_118 +0 val_118 val_118 +0 val_118 val_118 +0 val_125 val_125 +0 val_125 val_125 +0 val_125 val_125 +0 val_125 val_125 +0 val_129 val_129 +0 val_129 val_129 +0 val_129 val_129 +0 val_129 val_129 +0 val_134 val_134 +0 val_134 val_134 +0 val_134 val_134 +0 val_134 val_134 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_138 val_138 +0 val_149 val_149 +0 val_149 val_149 +0 val_149 val_149 +0 val_149 val_149 +0 val_15 val_15 +0 val_15 val_15 +0 val_15 val_15 +0 val_15 val_15 +0 val_152 val_152 +0 val_152 val_152 +0 val_152 val_152 +0 val_152 val_152 +0 val_165 val_165 +0 val_165 val_165 +0 val_165 val_165 +0 val_165 val_165 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_167 val_167 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_169 val_169 +0 val_172 val_172 +0 val_172 val_172 +0 val_172 val_172 +0 val_172 val_172 +0 val_174 val_174 +0 val_174 val_174 +0 val_174 val_174 +0 val_174 val_174 +0 val_176 val_176 +0 val_176 val_176 +0 val_176 val_176 +0 val_176 val_176 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_187 val_187 +0 val_200 val_200 +0 val_200 val_200 +0 val_200 val_200 +0 val_200 val_200 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_208 val_208 +0 val_213 val_213 +0 val_213 val_213 +0 val_213 val_213 +0 val_213 val_213 +0 val_217 val_217 +0 val_217 val_217 +0 val_217 val_217 +0 val_217 val_217 +0 val_219 val_219 +0 val_219 val_219 +0 val_219 val_219 +0 val_219 val_219 +0 val_224 val_224 +0 val_224 val_224 +0 val_224 val_224 +0 val_224 val_224 +0 val_233 val_233 +0 val_233 val_233 +0 val_233 val_233 +0 val_233 val_233 +0 val_237 val_237 +0 val_237 val_237 +0 val_237 val_237 +0 val_237 val_237 +0 val_239 val_239 +0 val_239 val_239 +0 val_239 val_239 +0 val_239 val_239 +0 val_24 val_24 +0 val_24 val_24 +0 val_24 val_24 +0 val_24 val_24 +0 val_242 val_242 +0 val_242 val_242 +0 val_242 val_242 +0 val_242 val_242 +0 val_255 val_255 +0 val_255 val_255 +0 val_255 val_255 +0 val_255 val_255 +0 val_26 val_26 +0 val_26 val_26 +0 val_26 val_26 +0 val_26 val_26 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 val_273 +0 val_273 
val_273 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_277 val_277 +0 val_280 val_280 +0 val_280 val_280 +0 val_280 val_280 +0 val_280 val_280 +0 val_282 val_282 +0 val_282 val_282 +0 val_282 val_282 +0 val_282 val_282 +0 val_288 val_288 +0 val_288 val_288 +0 val_288 val_288 +0 val_288 val_288 +0 val_307 val_307 +0 val_307 val_307 +0 val_307 val_307 +0 val_307 val_307 +0 val_309 val_309 +0 val_309 val_309 +0 val_309 val_309 +0 val_309 val_309 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_316 val_316 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_318 val_318 +0 val_321 val_321 +0 val_321 val_321 +0 val_321 val_321 +0 val_321 val_321 +0 val_325 val_325 +0 val_325 val_325 +0 val_325 val_325 +0 val_325 val_325 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_327 val_327 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_35 val_35 +0 val_367 val_367 +0 val_367 val_367 +0 val_367 val_367 +0 val_367 val_367 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_369 val_369 +0 val_37 val_37 +0 val_37 val_37 +0 val_37 val_37 +0 val_37 val_37 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_396 val_396 +0 val_404 val_404 +0 val_404 val_404 +0 val_404 val_404 +0 val_404 val_404 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_406 val_406 +0 val_413 val_413 +0 val_413 val_413 +0 val_413 val_413 +0 val_413 val_413 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_417 val_417 +0 val_42 val_42 +0 val_42 val_42 +0 val_42 val_42 +0 val_42 val_42 +0 val_424 val_424 +0 val_424 val_424 +0 val_424 val_424 +0 val_424 val_424 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_431 val_431 +0 val_439 val_439 +0 val_439 val_439 +0 val_439 val_439 +0 val_439 val_439 +0 val_459 val_459 +0 val_459 val_459 +0 val_459 val_459 +0 val_459 val_459 +0 val_462 val_462 +0 val_462 val_462 +0 val_462 val_462 +0 val_462 val_462 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_466 val_466 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 
val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_468 val_468 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_480 val_480 +0 val_51 val_51 +0 val_51 val_51 +0 val_51 val_51 +0 val_51 val_51 +0 val_84 val_84 +0 val_84 val_84 +0 val_84 val_84 +0 val_84 val_84 +0 val_95 val_95 +0 val_95 val_95 +0 val_95 val_95 +0 val_95 val_95 +0 val_97 val_97 +0 val_97 val_97 +0 val_97 val_97 +0 val_97 val_97 105 val_105 val_105 +11 val_11 val_11 114 val_114 val_114 116 val_116 val_116 -118 val_118 val_118 -118 val_118 val_118 -118 val_118 val_118 -118 val_118 val_118 -125 val_125 val_125 -125 val_125 val_125 -125 val_125 val_125 -125 val_125 val_125 -129 val_129 val_129 -129 val_129 val_129 -129 val_129 val_129 -129 val_129 val_129 -134 val_134 val_134 -134 val_134 val_134 -134 val_134 val_134 -134 val_134 val_134 136 val_136 val_136 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 -138 val_138 val_138 143 val_143 val_143 145 val_145 val_145 -149 val_149 val_149 -149 val_149 val_149 -149 val_149 val_149 -149 val_149 val_149 150 val_150 val_150 -152 val_152 val_152 -152 val_152 val_152 -152 val_152 val_152 -152 val_152 val_152 156 val_156 val_156 158 val_158 val_158 163 val_163 val_163 -165 val_165 val_165 -165 val_165 val_165 -165 val_165 val_165 -165 val_165 val_165 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -167 val_167 val_167 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 -169 val_169 val_169 +17 val_17 val_17 170 val_170 val_170 -172 val_172 val_172 -172 val_172 val_172 -172 val_172 val_172 -172 val_172 val_172 -174 val_174 val_174 -174 val_174 val_174 -174 val_174 val_174 -174 val_174 val_174 -176 val_176 val_176 -176 val_176 val_176 -176 val_176 val_176 -176 val_176 val_176 178 val_178 val_178 181 val_181 val_181 183 val_183 val_183 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 -187 val_187 val_187 189 val_189 val_189 +19 val_19 val_19 190 val_190 val_190 192 val_192 val_192 -194 val_194 val_194 -196 val_196 val_196 -200 val_200 val_200 -200 val_200 val_200 -200 val_200 val_200 -200 val_200 val_200 +194 val_194 val_194 +196 val_196 val_196 +2 val_2 val_2 +20 val_20 val_20 202 val_202 val_202 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -208 val_208 val_208 -213 val_213 val_213 -213 val_213 val_213 -213 val_213 val_213 -213 val_213 val_213 -217 val_217 val_217 -217 val_217 val_217 -217 val_217 val_217 -217 val_217 val_217 -219 val_219 val_219 -219 val_219 val_219 -219 val_219 val_219 -219 val_219 val_219 222 val_222 val_222 -224 val_224 val_224 -224 val_224 val_224 -224 
val_224 val_224 -224 val_224 val_224 226 val_226 val_226 228 val_228 val_228 -233 val_233 val_233 -233 val_233 val_233 -233 val_233 val_233 -233 val_233 val_233 235 val_235 val_235 -237 val_237 val_237 -237 val_237 val_237 -237 val_237 val_237 -237 val_237 val_237 -239 val_239 val_239 -239 val_239 val_239 -239 val_239 val_239 -239 val_239 val_239 -242 val_242 val_242 -242 val_242 val_242 -242 val_242 val_242 -242 val_242 val_242 244 val_244 val_244 248 val_248 val_248 -255 val_255 val_255 -255 val_255 val_255 -255 val_255 val_255 -255 val_255 val_255 257 val_257 val_257 260 val_260 val_260 262 val_262 val_262 266 val_266 val_266 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 275 val_275 val_275 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -277 val_277 val_277 -280 val_280 val_280 -280 val_280 val_280 -280 val_280 val_280 -280 val_280 val_280 -282 val_282 val_282 -282 val_282 val_282 -282 val_282 val_282 -282 val_282 val_282 +28 val_28 val_28 284 val_284 val_284 286 val_286 val_286 -288 val_288 val_288 -288 val_288 val_288 -288 val_288 val_288 -288 val_288 val_288 291 val_291 val_291 305 val_305 val_305 -307 val_307 val_307 -307 val_307 val_307 -307 val_307 val_307 -307 val_307 val_307 -309 val_309 val_309 -309 val_309 val_309 -309 val_309 val_309 -309 val_309 val_309 310 val_310 val_310 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -316 val_316 val_316 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -318 val_318 val_318 -321 val_321 val_321 -321 val_321 val_321 -321 val_321 val_321 -321 val_321 val_321 323 val_323 val_323 -325 val_325 val_325 -325 val_325 val_325 -325 val_325 val_325 -325 val_325 val_325 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 -327 val_327 val_327 +33 val_33 val_33 332 val_332 val_332 336 val_336 val_336 338 val_338 val_338 @@ -6963,144 +7066,41 @@ POSTHOOK: Input: default@tab_part_n9@ds=2008-04-08 345 val_345 val_345 356 val_356 val_356 365 val_365 val_365 -367 val_367 val_367 -367 val_367 val_367 -367 val_367 val_367 -367 val_367 val_367 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 -369 val_369 val_369 374 val_374 val_374 378 val_378 val_378 389 val_389 val_389 392 val_392 val_392 394 val_394 val_394 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 -396 val_396 val_396 +4 val_4 val_4 400 val_400 val_400 402 val_402 val_402 -404 val_404 val_404 -404 val_404 val_404 -404 val_404 val_404 -404 val_404 val_404 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 
val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 411 val_411 val_411 -413 val_413 val_413 -413 val_413 val_413 -413 val_413 val_413 -413 val_413 val_413 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 -417 val_417 val_417 419 val_419 val_419 -424 val_424 val_424 -424 val_424 val_424 -424 val_424 val_424 -424 val_424 val_424 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 -431 val_431 val_431 435 val_435 val_435 437 val_437 val_437 -439 val_439 val_439 -439 val_439 val_439 -439 val_439 val_439 -439 val_439 val_439 +44 val_44 val_44 444 val_444 val_444 446 val_446 val_446 448 val_448 val_448 453 val_453 val_453 455 val_455 val_455 457 val_457 val_457 -459 val_459 val_459 -459 val_459 val_459 -459 val_459 val_459 -459 val_459 val_459 460 val_460 val_460 -462 val_462 val_462 -462 val_462 val_462 -462 val_462 val_462 -462 val_462 val_462 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -466 val_466 val_466 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 -468 val_468 val_468 475 val_475 val_475 477 val_477 val_477 479 val_479 val_479 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 -480 val_480 val_480 482 val_482 val_482 484 val_484 val_484 491 val_491 val_491 493 val_493 val_493 495 val_495 val_495 497 val_497 val_497 +53 val_53 val_53 +57 val_57 val_57 +64 val_64 val_64 +66 val_66 val_66 +77 val_77 val_77 +8 val_8 val_8 +80 val_80 val_80 +82 val_82 val_82 +86 val_86 val_86 diff --git ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out index 2ee75fde53..9565606c46 100644 --- ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out +++ ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out @@ -191,7 +191,7 @@ STAGE PLANS: Statistics: Num rows: 75 Data size: 30250 Basic stats: PARTIAL Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 75 Data size: 30250 Basic stats: PARTIAL Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out_spark ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out_spark index 2a4bb8d616..c23ac09e0e 100644 --- ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out_spark +++ ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out_spark @@ -188,7 +188,7 @@ STAGE PLANS: Statistics: Num rows: 75 Data size: 30250 Basic stats: PARTIAL Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 75 Data size: 30250 Basic stats: 
PARTIAL Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/spark/cbo_limit.q.out ql/src/test/results/clientpositive/spark/cbo_limit.q.out index c5825788e7..a5e36d2eb9 100644 --- ql/src/test/results/clientpositive/spark/cbo_limit.q.out +++ ql/src/test/results/clientpositive/spark/cbo_limit.q.out @@ -8,7 +8,7 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@cbo_t1 POSTHOOK: Input: default@cbo_t1@dt=2014 #### A masked pattern was here #### -NULL NULL NULL +1 4 2 PREHOOK: query: select x, y, count(*) from (select key, (c_int+c_float+1+2) as x, sum(c_int) as y from cbo_t1 group by c_float, cbo_t1.c_int, key) R group by y, x order by x,y limit 1 PREHOOK: type: QUERY PREHOOK: Input: default@cbo_t1 @@ -19,7 +19,7 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@cbo_t1 POSTHOOK: Input: default@cbo_t1@dt=2014 #### A masked pattern was here #### -NULL NULL 1 +5.0 2 3 PREHOOK: query: select key from(select key from (select key from cbo_t1 limit 5)cbo_t2 limit 5)cbo_t3 limit 5 PREHOOK: type: QUERY PREHOOK: Input: default@cbo_t1 @@ -45,8 +45,8 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@cbo_t1 POSTHOOK: Input: default@cbo_t1@dt=2014 #### A masked pattern was here #### -NULL NULL -NULL NULL +1 1 +1 1 1 1 1 1 1 1 diff --git ql/src/test/results/clientpositive/spark/groupby_resolution.q.out ql/src/test/results/clientpositive/spark/groupby_resolution.q.out index a6e9b46afe..81a392cbcf 100644 --- ql/src/test/results/clientpositive/spark/groupby_resolution.q.out +++ ql/src/test/results/clientpositive/spark/groupby_resolution.q.out @@ -694,7 +694,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: 0 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/spark/join18.q.out ql/src/test/results/clientpositive/spark/join18.q.out index 478b6a6222..659a1c346f 100644 --- ql/src/test/results/clientpositive/spark/join18.q.out +++ ql/src/test/results/clientpositive/spark/join18.q.out @@ -98,7 +98,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) diff --git ql/src/test/results/clientpositive/spark/join18_multi_distinct.q.out ql/src/test/results/clientpositive/spark/join18_multi_distinct.q.out index c7b18d7e14..75009b57a2 100644 --- ql/src/test/results/clientpositive/spark/join18_multi_distinct.q.out +++ ql/src/test/results/clientpositive/spark/join18_multi_distinct.q.out @@ -100,7 +100,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) diff --git ql/src/test/results/clientpositive/spark/join32.q.out ql/src/test/results/clientpositive/spark/join32.q.out index 665cf6705b..62ef1a3a02 100644 --- ql/src/test/results/clientpositive/spark/join32.q.out +++ ql/src/test/results/clientpositive/spark/join32.q.out @@ -393,88 +393,88 @@ POSTHOOK: query: select * from dest_j1_n12 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1_n12 #### A masked pattern was here #### -146 val_146 val_146 -146 val_146 val_146 -146 val_146 val_146 -146 val_146 val_146 +146 val_146 +146 val_146 +146 val_146 +146 val_146 150 val_150 val_150 -213 val_213 val_213 -213 val_213 val_213 -213 val_213 val_213 -213 val_213 val_213 -238 val_238 val_238 -238 val_238 val_238 -238 val_238 val_238 -238 val_238 val_238 -255 val_255 val_255 -255 val_255 val_255 
-255 val_255 val_255 -255 val_255 val_255 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -278 val_278 val_278 -278 val_278 val_278 -278 val_278 val_278 -278 val_278 val_278 -311 val_311 val_311 -311 val_311 val_311 -311 val_311 val_311 -311 val_311 val_311 -311 val_311 val_311 -311 val_311 val_311 -311 val_311 val_311 -311 val_311 val_311 -311 val_311 val_311 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 +213 val_213 +213 val_213 +213 val_213 +213 val_213 +238 val_238 +238 val_238 +238 val_238 +238 val_238 +255 val_255 +255 val_255 +255 val_255 +255 val_255 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +278 val_278 +278 val_278 +278 val_278 +278 val_278 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 66 val_66 val_66 -98 val_98 val_98 -98 val_98 val_98 -98 val_98 val_98 -98 val_98 val_98 +98 val_98 +98 val_98 +98 val_98 +98 val_98 diff --git ql/src/test/results/clientpositive/spark/join33.q.out ql/src/test/results/clientpositive/spark/join33.q.out index 13cd446350..09198b0981 100644 --- ql/src/test/results/clientpositive/spark/join33.q.out +++ ql/src/test/results/clientpositive/spark/join33.q.out @@ -393,88 +393,88 @@ POSTHOOK: query: select * from dest_j1_n7 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_j1_n7 #### A masked pattern was here #### -146 val_146 val_146 -146 val_146 val_146 -146 val_146 val_146 -146 val_146 val_146 +146 val_146 +146 val_146 +146 val_146 +146 val_146 150 val_150 val_150 -213 val_213 val_213 -213 val_213 val_213 -213 val_213 val_213 -213 val_213 val_213 -238 val_238 val_238 -238 val_238 val_238 -238 val_238 val_238 -238 val_238 val_238 -255 val_255 val_255 -255 val_255 val_255 -255 val_255 val_255 -255 val_255 val_255 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -273 val_273 val_273 -278 val_278 val_278 -278 val_278 val_278 -278 val_278 
val_278 -278 val_278 val_278 -311 val_311 val_311 -311 val_311 val_311 -311 val_311 val_311 -311 val_311 val_311 -311 val_311 val_311 -311 val_311 val_311 -311 val_311 val_311 -311 val_311 val_311 -311 val_311 val_311 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -401 val_401 val_401 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 -406 val_406 val_406 +213 val_213 +213 val_213 +213 val_213 +213 val_213 +238 val_238 +238 val_238 +238 val_238 +238 val_238 +255 val_255 +255 val_255 +255 val_255 +255 val_255 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +278 val_278 +278 val_278 +278 val_278 +278 val_278 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +401 val_401 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 +406 val_406 66 val_66 val_66 -98 val_98 val_98 -98 val_98 val_98 -98 val_98 val_98 -98 val_98 val_98 +98 val_98 +98 val_98 +98 val_98 +98 val_98 diff --git ql/src/test/results/clientpositive/spark/join6.q.out ql/src/test/results/clientpositive/spark/join6.q.out index 6075e5fb34..caa0849874 100644 --- ql/src/test/results/clientpositive/spark/join6.q.out +++ ql/src/test/results/clientpositive/spark/join6.q.out @@ -92,7 +92,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) diff --git ql/src/test/results/clientpositive/spark/join7.q.out ql/src/test/results/clientpositive/spark/join7.q.out index 9ebb3e33e0..423e5184c1 100644 --- ql/src/test/results/clientpositive/spark/join7.q.out +++ ql/src/test/results/clientpositive/spark/join7.q.out @@ -122,7 +122,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 Left Outer Join 0 to 2 keys: 0 _col0 (type: string) diff --git ql/src/test/results/clientpositive/spark/join_filters_overlap.q.out ql/src/test/results/clientpositive/spark/join_filters_overlap.q.out index 26acd7eff3..749b926bcf 100644 --- ql/src/test/results/clientpositive/spark/join_filters_overlap.q.out +++ ql/src/test/results/clientpositive/spark/join_filters_overlap.q.out @@ -1312,7 +1312,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 Left Outer Join 
1 to 2 Left Outer Join 0 to 3 filter mappings: diff --git ql/src/test/results/clientpositive/spark/limit_pushdown.q.out ql/src/test/results/clientpositive/spark/limit_pushdown.q.out index 20875870eb..fea2866e75 100644 --- ql/src/test/results/clientpositive/spark/limit_pushdown.q.out +++ ql/src/test/results/clientpositive/spark/limit_pushdown.q.out @@ -431,6 +431,7 @@ POSTHOOK: query: select distinct(cdouble) as dis from alltypesorc order by dis l POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### +-16243.0 -16269.0 -16274.0 -16277.0 @@ -450,7 +451,6 @@ POSTHOOK: Input: default@alltypesorc -16372.0 -16373.0 -16379.0 -NULL PREHOOK: query: explain select ctinyint, count(distinct(cdouble)) from alltypesorc group by ctinyint order by ctinyint limit 20 PREHOOK: type: QUERY @@ -527,6 +527,7 @@ POSTHOOK: query: select ctinyint, count(distinct(cdouble)) from alltypesorc grou POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### +-45 24 -46 24 -47 22 -48 29 @@ -546,7 +547,6 @@ POSTHOOK: Input: default@alltypesorc -62 27 -63 19 -64 24 -NULL 2932 PREHOOK: query: explain select ctinyint, count(cdouble) from (select ctinyint, cdouble from alltypesorc group by ctinyint, cdouble) t1 group by ctinyint order by ctinyint limit 20 PREHOOK: type: QUERY @@ -623,6 +623,7 @@ POSTHOOK: query: select ctinyint, count(cdouble) from (select ctinyint, cdouble POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### +-45 24 -46 24 -47 22 -48 29 @@ -642,7 +643,6 @@ POSTHOOK: Input: default@alltypesorc -62 27 -63 19 -64 24 -NULL 2932 PREHOOK: query: explain select ctinyint, count(distinct(cstring1)), count(distinct(cstring2)) from alltypesorc group by ctinyint order by ctinyint limit 20 PREHOOK: type: QUERY @@ -714,6 +714,7 @@ POSTHOOK: query: select ctinyint, count(distinct(cstring1)), count(distinct(cstr POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### +-45 3 24 -46 3 19 -47 3 23 -48 3 27 @@ -733,7 +734,6 @@ POSTHOOK: Input: default@alltypesorc -62 3 23 -63 3 16 -64 3 13 -NULL 3065 3 PREHOOK: query: explain select key,value from src order by key limit 0 PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/spark/limit_pushdown2.q.out ql/src/test/results/clientpositive/spark/limit_pushdown2.q.out index a05c114b79..8e3aafbe12 100644 --- ql/src/test/results/clientpositive/spark/limit_pushdown2.q.out +++ ql/src/test/results/clientpositive/spark/limit_pushdown2.q.out @@ -1170,23 +1170,23 @@ order by key, value limit 20 POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -NULL NULL 261.182 -NULL val_0 1.0 -NULL val_10 11.0 -NULL val_100 101.0 -NULL val_103 104.0 -NULL val_104 105.0 -NULL val_105 106.0 -NULL val_11 12.0 -NULL val_111 112.0 -NULL val_113 114.0 -NULL val_114 115.0 -NULL val_116 117.0 -NULL val_118 119.0 -NULL val_119 120.0 -NULL val_12 13.0 -NULL val_120 121.0 -NULL val_125 126.0 -NULL val_126 127.0 -NULL val_128 129.0 -NULL val_129 130.0 +0 val_0 1.0 +10 val_10 11.0 +100 val_100 101.0 +103 val_103 104.0 +104 val_104 105.0 +105 val_105 106.0 +11 val_11 12.0 +111 val_111 112.0 +113 val_113 114.0 +114 val_114 115.0 +116 val_116 117.0 +118 val_118 119.0 +119 val_119 120.0 +12 val_12 13.0 +120 val_120 121.0 +125 val_125 126.0 +126 val_126 127.0 +128 val_128 129.0 +129 val_129 130.0 +131 val_131 132.0 diff --git ql/src/test/results/clientpositive/spark/mergejoins_mixed.q.out 
ql/src/test/results/clientpositive/spark/mergejoins_mixed.q.out index 1a3bf0d096..d334ca4843 100644 --- ql/src/test/results/clientpositive/spark/mergejoins_mixed.q.out +++ ql/src/test/results/clientpositive/spark/mergejoins_mixed.q.out @@ -895,7 +895,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col3 (type: string) 1 _col0 (type: string) @@ -911,7 +911,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) @@ -1205,7 +1205,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) @@ -1499,7 +1499,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col0 (type: string) 1 _col0 (type: string) diff --git ql/src/test/results/clientpositive/spark/parquet_vectorization_0.q.out ql/src/test/results/clientpositive/spark/parquet_vectorization_0.q.out index 288c38d6e9..6973da0057 100644 --- ql/src/test/results/clientpositive/spark/parquet_vectorization_0.q.out +++ ql/src/test/results/clientpositive/spark/parquet_vectorization_0.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT MIN(ctinyint) as c1, MAX(ctinyint), COUNT(ctinyint), @@ -6,7 +6,7 @@ SELECT MIN(ctinyint) as c1, FROM alltypesparquet ORDER BY c1 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT MIN(ctinyint) as c1, MAX(ctinyint), COUNT(ctinyint), @@ -37,7 +37,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Select Operator expressions: ctinyint (type: tinyint) outputColumnNames: ctinyint @@ -62,10 +61,8 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3] Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: tinyint), _col1 (type: tinyint), _col2 (type: bigint), _col3 (type: bigint) Execution mode: vectorized @@ -78,27 +75,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - 
reduceColumnNullOrder: - reduceColumnSortOrder: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 4 - dataColumns: VALUE._col0:tinyint, VALUE._col1:tinyint, VALUE._col2:bigint, VALUE._col3:bigint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: min(VALUE._col0), max(VALUE._col1), count(VALUE._col2), count(VALUE._col3) @@ -117,10 +101,8 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2, 3] Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: tinyint), _col2 (type: bigint), _col3 (type: bigint) Reducer 3 @@ -128,16 +110,9 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 4 - dataColumns: KEY.reducesinkkey0:tinyint, VALUE._col0:tinyint, VALUE._col1:bigint, VALUE._col2:bigint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: tinyint), VALUE._col0 (type: tinyint), VALUE._col1 (type: bigint), VALUE._col2 (type: bigint) @@ -183,12 +158,12 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### -64 62 9173 12288 -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(ctinyint) as c1 FROM alltypesparquet ORDER BY c1 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(ctinyint) as c1 FROM alltypesparquet ORDER BY c1 @@ -216,7 +191,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Select Operator expressions: ctinyint (type: tinyint) outputColumnNames: ctinyint @@ -241,10 +215,8 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized @@ -257,27 +229,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, 
cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: - reduceColumnSortOrder: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: VALUE._col0:bigint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0) @@ -296,26 +255,17 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE Reducer 3 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:bigint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: bigint) @@ -504,7 +454,7 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### -4.344925324321378 1158.3003004768175 1158.3003004768175 1158.426587033782 34.03381113652741 34.03381113652741 34.03381113652741 34.03566639620535 -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT MIN(cbigint) as c1, MAX(cbigint), COUNT(cbigint), @@ -512,7 +462,7 @@ SELECT MIN(cbigint) as c1, FROM alltypesparquet ORDER BY c1 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT MIN(cbigint) as c1, MAX(cbigint), COUNT(cbigint), @@ -543,7 +493,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Select Operator expressions: cbigint (type: bigint) outputColumnNames: cbigint @@ -568,10 +517,8 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3] Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: bigint), _col3 (type: 
bigint) Execution mode: vectorized @@ -584,27 +531,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [3] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: - reduceColumnSortOrder: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 4 - dataColumns: VALUE._col0:bigint, VALUE._col1:bigint, VALUE._col2:bigint, VALUE._col3:bigint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: min(VALUE._col0), max(VALUE._col1), count(VALUE._col2), count(VALUE._col3) @@ -623,10 +557,8 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2, 3] Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint), _col2 (type: bigint), _col3 (type: bigint) Reducer 3 @@ -634,16 +566,9 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 4 - dataColumns: KEY.reducesinkkey0:bigint, VALUE._col0:bigint, VALUE._col1:bigint, VALUE._col2:bigint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: bigint), VALUE._col1 (type: bigint), VALUE._col2 (type: bigint) @@ -689,12 +614,12 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### -2147311592 2145498388 9173 12288 -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(cbigint) as c1 FROM alltypesparquet ORDER BY c1 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(cbigint) as c1 FROM alltypesparquet ORDER BY c1 @@ -722,7 +647,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Select Operator expressions: cbigint (type: bigint) outputColumnNames: cbigint @@ -747,10 +671,8 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true 
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized @@ -763,27 +685,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [3] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: - reduceColumnSortOrder: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: VALUE._col0:bigint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0) @@ -802,26 +711,17 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE Reducer 3 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:bigint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: bigint) @@ -1010,7 +910,7 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### -1.8515862077935246E8 2.07689300543066035E18 2.07689300543066035E18 2.07711944383072922E18 1.441142951074133E9 1.441142951074133E9 1.441142951074133E9 1.4412215110213728E9 -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT MIN(cfloat) as c1, MAX(cfloat), COUNT(cfloat), @@ -1018,7 +918,7 @@ SELECT MIN(cfloat) as c1, FROM alltypesparquet ORDER BY c1 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT MIN(cfloat) as c1, MAX(cfloat), COUNT(cfloat), @@ -1049,7 +949,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 
12:ROW__ID:struct] Select Operator expressions: cfloat (type: float) outputColumnNames: cfloat @@ -1074,10 +973,8 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3] Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: float), _col1 (type: float), _col2 (type: bigint), _col3 (type: bigint) Execution mode: vectorized @@ -1090,27 +987,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [4] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: - reduceColumnSortOrder: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 4 - dataColumns: VALUE._col0:float, VALUE._col1:float, VALUE._col2:bigint, VALUE._col3:bigint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: min(VALUE._col0), max(VALUE._col1), count(VALUE._col2), count(VALUE._col3) @@ -1129,10 +1013,8 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2, 3] Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: float), _col2 (type: bigint), _col3 (type: bigint) Reducer 3 @@ -1140,16 +1022,9 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 4 - dataColumns: KEY.reducesinkkey0:float, VALUE._col0:float, VALUE._col1:bigint, VALUE._col2:bigint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: float), VALUE._col0 (type: float), VALUE._col1 (type: bigint), VALUE._col2 (type: bigint) @@ -1195,12 +1070,12 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### -64.0 79.553 9173 12288 -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(cfloat) as c1 FROM alltypesparquet ORDER BY c1 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(cfloat) as c1 FROM 
alltypesparquet ORDER BY c1 @@ -1228,7 +1103,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Select Operator expressions: cfloat (type: float) outputColumnNames: cfloat @@ -1253,10 +1127,8 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: double) Execution mode: vectorized @@ -1269,27 +1141,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [4] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: - reduceColumnSortOrder: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: VALUE._col0:double - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0) @@ -1308,26 +1167,17 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE Reducer 3 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY.reducesinkkey0:double - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: double) @@ -1517,7 +1367,7 @@ POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### -4.303895780321011 1163.8972588605056 1163.8972588605056 1164.0241556397098 34.11593848717203 34.11593848717203 34.11593848717203 34.11779822379677 WARNING: Comparing a bigint and a double may result in a loss of precision. 
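[Editor's aside on the precision warning above — illustrative only, not part of this patch. A double carries a 53-bit significand, so distinct long (bigint) values above 2^53 can round to the same double, which is why Hive emits this WARNING for bigint-vs-double comparisons instead of failing the query. A minimal, self-contained Java sketch (the class name is invented here):

// Why comparing a bigint with a double may lose precision:
// long values beyond 2^53 cannot all be represented exactly as double.
public class BigintDoublePrecision {
    public static void main(String[] args) {
        long a = (1L << 53) + 1;      // 9007199254740993: not exactly representable
        long b = (1L << 53);          // 9007199254740992: exactly representable
        double d = (double) a;        // rounds to 9007199254740992.0

        System.out.println(a == d);   // true: 'a' is widened to the same rounded double
        System.out.println(b == d);   // also true: two distinct longs equal one double
        System.out.println((long) d); // prints 9007199254740992, not ...993
    }
}

Two different bigint values comparing equal to the same double is exactly the silent-precision-loss hazard the warning flags.]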
-PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT AVG(cbigint), (-(AVG(cbigint))), (-6432 + AVG(cbigint)), @@ -1544,7 +1394,7 @@ WHERE (((cstring2 LIKE '%b%') AND ((cboolean2 = 1) AND (3569 = ctinyint)))) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT AVG(cbigint), (-(AVG(cbigint))), (-6432 + AVG(cbigint)), @@ -1594,7 +1444,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -1627,10 +1476,8 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3, 4, 5, 6] Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: double), _col3 (type: double), _col4 (type: bigint), _col5 (type: double), _col6 (type: tinyint) Execution mode: vectorized @@ -1643,27 +1490,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 2, 3, 4, 5, 7, 11] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(13,3), double, double, double, double] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: - reduceColumnSortOrder: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 7 - dataColumns: VALUE._col0:bigint, VALUE._col1:bigint, VALUE._col2:double, VALUE._col3:double, VALUE._col4:bigint, VALUE._col5:double, VALUE._col6:tinyint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0), count(VALUE._col1), sum(VALUE._col2), sum(VALUE._col3), count(VALUE._col4), sum(VALUE._col5), min(VALUE._col6) @@ -31049,7 +30883,7 @@ STAGE PLANS: Statistics: Num rows: 6144 Data size: 73728 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col1 (type: string) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 6144 Data size: 73728 Basic stats: COMPLETE Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/spark/parquet_vectorization_1.q.out ql/src/test/results/clientpositive/spark/parquet_vectorization_1.q.out index 5c79743429..8f5c06caf2 100644 --- ql/src/test/results/clientpositive/spark/parquet_vectorization_1.q.out +++ 
ql/src/test/results/clientpositive/spark/parquet_vectorization_1.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT VAR_POP(ctinyint), (VAR_POP(ctinyint) / -26.28), SUM(cfloat), @@ -19,7 +19,7 @@ WHERE (((cdouble > ctinyint) OR ((cint > cbigint) OR (cboolean1 < 0)))) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT VAR_POP(ctinyint), (VAR_POP(ctinyint) / -26.28), SUM(cfloat), @@ -63,7 +63,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -96,10 +95,8 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] Statistics: Num rows: 1 Data size: 72 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: double), _col1 (type: double), _col2 (type: bigint), _col3 (type: double), _col4 (type: tinyint), _col5 (type: int), _col6 (type: double), _col7 (type: double), _col8 (type: bigint), _col9 (type: bigint) Execution mode: vectorized @@ -112,27 +109,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 2, 3, 4, 5, 10, 11] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double, double, double, double] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: - reduceColumnSortOrder: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 10 - dataColumns: VALUE._col0:double, VALUE._col1:double, VALUE._col2:bigint, VALUE._col3:double, VALUE._col4:tinyint, VALUE._col5:int, VALUE._col6:double, VALUE._col7:double, VALUE._col8:bigint, VALUE._col9:bigint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0), sum(VALUE._col1), count(VALUE._col2), sum(VALUE._col3), max(VALUE._col4), max(VALUE._col5), sum(VALUE._col6), sum(VALUE._col7), count(VALUE._col8), count(VALUE._col9) diff --git ql/src/test/results/clientpositive/spark/parquet_vectorization_10.q.out ql/src/test/results/clientpositive/spark/parquet_vectorization_10.q.out index 4305feee5a..9a3b654c95 100644 --- ql/src/test/results/clientpositive/spark/parquet_vectorization_10.q.out +++ ql/src/test/results/clientpositive/spark/parquet_vectorization_10.q.out @@ -1,4 +1,4 @@ -PREHOOK: 
query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cdouble, ctimestamp1, ctinyint, @@ -22,7 +22,7 @@ WHERE (((cstring2 <= '10') AND ((csmallint = 9763215.5639) OR (cstring1 LIKE '%a')))) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cdouble, ctimestamp1, ctinyint, @@ -67,7 +67,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -104,12 +103,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 3, 5, 6, 7, 8, 10] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double, decimal(6,2), decimal(11,4), double, double, double, double, double, bigint, bigint, bigint, double, double, double] Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/spark/parquet_vectorization_11.q.out ql/src/test/results/clientpositive/spark/parquet_vectorization_11.q.out index 995316e2d1..320259c05f 100644 --- ql/src/test/results/clientpositive/spark/parquet_vectorization_11.q.out +++ ql/src/test/results/clientpositive/spark/parquet_vectorization_11.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cstring1, cboolean1, cdouble, @@ -13,7 +13,7 @@ WHERE ((cstring2 = cstring1) OR ((ctimestamp1 IS NULL) AND (cstring1 LIKE '%a'))) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cstring1, cboolean1, cdouble, @@ -49,7 +49,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -86,12 +85,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [1, 5, 6, 7, 8, 10] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [bigint, double, double, double, double] Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/spark/parquet_vectorization_12.q.out ql/src/test/results/clientpositive/spark/parquet_vectorization_12.q.out index de7abe08df..8dd930a1eb 100644 --- 
ql/src/test/results/clientpositive/spark/parquet_vectorization_12.q.out +++ ql/src/test/results/clientpositive/spark/parquet_vectorization_12.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cbigint, cboolean1, cstring1, @@ -30,7 +30,7 @@ WHERE (((ctimestamp1 IS NULL) GROUP BY cbigint, cboolean1, cstring1, ctimestamp1, cdouble ORDER BY ctimestamp1, cdouble, cbigint, cstring1 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cbigint, cboolean1, cstring1, @@ -86,7 +86,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -123,10 +122,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: double), _col1 (type: bigint), _col2 (type: string), _col3 (type: boolean) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2, 3] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [4, 5, 6, 7, 8, 9, 10] Statistics: Num rows: 3754 Data size: 45048 Basic stats: COMPLETE Column stats: NONE value expressions: _col4 (type: bigint), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: bigint), _col9 (type: bigint), _col10 (type: double) Execution mode: vectorized @@ -139,27 +136,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 3, 5, 6, 8, 10, 11] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double, double, double, double] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aaaa - reduceColumnSortOrder: ++++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 11 - dataColumns: KEY._col0:double, KEY._col1:bigint, KEY._col2:string, KEY._col3:boolean, VALUE._col0:bigint, VALUE._col1:double, VALUE._col2:double, VALUE._col3:double, VALUE._col4:bigint, VALUE._col5:bigint, VALUE._col6:double - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: count(VALUE._col0), sum(VALUE._col1), sum(VALUE._col2), sum(VALUE._col3), count(VALUE._col4), sum(VALUE._col5), sum(VALUE._col6) @@ -189,10 +173,8 @@ STAGE PLANS: sort order: +++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine 
spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3, 11, 12, 4, 13, 14, 19, 15, 20, 22, 24, 9, 26, 25, 21, 27] Statistics: Num rows: 1877 Data size: 22524 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: boolean), _col4 (type: double), _col5 (type: bigint), _col6 (type: bigint), _col7 (type: bigint), _col8 (type: double), _col9 (type: double), _col10 (type: double), _col11 (type: double), _col12 (type: double), _col13 (type: decimal(22,2)), _col14 (type: bigint), _col15 (type: double), _col17 (type: double), _col18 (type: double), _col19 (type: double) Reducer 3 @@ -200,16 +182,9 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aaa - reduceColumnSortOrder: +++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 19 - dataColumns: KEY.reducesinkkey0:double, KEY.reducesinkkey1:bigint, KEY.reducesinkkey2:string, VALUE._col0:boolean, VALUE._col1:double, VALUE._col2:bigint, VALUE._col3:bigint, VALUE._col4:bigint, VALUE._col5:double, VALUE._col6:double, VALUE._col7:double, VALUE._col8:double, VALUE._col9:double, VALUE._col10:decimal(22,2), VALUE._col11:bigint, VALUE._col12:double, VALUE._col13:double, VALUE._col14:double, VALUE._col15:double - partitionColumnCount: 0 - scratchColumnTypeNames: [timestamp] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey1 (type: bigint), VALUE._col0 (type: boolean), KEY.reducesinkkey2 (type: string), null (type: timestamp), KEY.reducesinkkey0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: bigint), VALUE._col3 (type: bigint), VALUE._col4 (type: bigint), VALUE._col5 (type: double), VALUE._col6 (type: double), VALUE._col7 (type: double), VALUE._col8 (type: double), VALUE._col9 (type: double), VALUE._col10 (type: decimal(22,2)), VALUE._col11 (type: bigint), VALUE._col12 (type: double), VALUE._col8 (type: double), VALUE._col13 (type: double), VALUE._col14 (type: double), VALUE._col15 (type: double) diff --git ql/src/test/results/clientpositive/spark/parquet_vectorization_13.q.out ql/src/test/results/clientpositive/spark/parquet_vectorization_13.q.out index 09b50c77b5..78a2428cfb 100644 --- ql/src/test/results/clientpositive/spark/parquet_vectorization_13.q.out +++ ql/src/test/results/clientpositive/spark/parquet_vectorization_13.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, ctinyint, ctimestamp1, @@ -31,7 +31,7 @@ GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1 ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16 LIMIT 40 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, ctinyint, ctimestamp1, @@ -88,7 +88,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] 
Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -125,10 +124,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: boolean), _col1 (type: tinyint), _col2 (type: timestamp), _col3 (type: float), _col4 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2, 3, 4] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [5, 6, 7, 8, 9, 10, 11, 12, 13, 14] Statistics: Num rows: 2730 Data size: 32760 Basic stats: COMPLETE Column stats: NONE value expressions: _col5 (type: tinyint), _col6 (type: double), _col7 (type: double), _col8 (type: double), _col9 (type: bigint), _col10 (type: double), _col11 (type: double), _col12 (type: bigint), _col13 (type: float), _col14 (type: tinyint) Execution mode: vectorized @@ -141,27 +138,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 4, 5, 6, 8, 9, 10] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double, decimal(11,4), double, double, double, double] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aaaaa - reduceColumnSortOrder: +++++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 15 - dataColumns: KEY._col0:boolean, KEY._col1:tinyint, KEY._col2:timestamp, KEY._col3:float, KEY._col4:string, VALUE._col0:tinyint, VALUE._col1:double, VALUE._col2:double, VALUE._col3:double, VALUE._col4:bigint, VALUE._col5:double, VALUE._col6:double, VALUE._col7:bigint, VALUE._col8:float, VALUE._col9:tinyint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: max(VALUE._col0), sum(VALUE._col1), sum(VALUE._col2), sum(VALUE._col3), count(VALUE._col4), sum(VALUE._col5), sum(VALUE._col6), count(VALUE._col7), max(VALUE._col8), min(VALUE._col9) @@ -191,10 +175,8 @@ STAGE PLANS: sort order: +++++++++++++++++++++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2, 3, 4, 15, 5, 17, 6, 20, 19, 21, 22, 23, 24, 27, 28, 25, 13, 31, 14] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1365 Data size: 16380 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 Reducer 3 @@ -202,16 +184,9 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aaaaaaaaaaaaaaaaaaaaa - reduceColumnSortOrder: +++++++++++++++++++++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 21 - 
dataColumns: KEY.reducesinkkey0:boolean, KEY.reducesinkkey1:tinyint, KEY.reducesinkkey2:timestamp, KEY.reducesinkkey3:float, KEY.reducesinkkey4:string, KEY.reducesinkkey5:tinyint, KEY.reducesinkkey6:tinyint, KEY.reducesinkkey7:tinyint, KEY.reducesinkkey8:double, KEY.reducesinkkey9:double, KEY.reducesinkkey10:double, KEY.reducesinkkey11:float, KEY.reducesinkkey12:double, KEY.reducesinkkey13:double, KEY.reducesinkkey14:double, KEY.reducesinkkey15:decimal(7,3), KEY.reducesinkkey16:double, KEY.reducesinkkey17:double, KEY.reducesinkkey18:float, KEY.reducesinkkey19:double, KEY.reducesinkkey20:tinyint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: boolean), KEY.reducesinkkey1 (type: tinyint), KEY.reducesinkkey2 (type: timestamp), KEY.reducesinkkey3 (type: float), KEY.reducesinkkey4 (type: string), KEY.reducesinkkey5 (type: tinyint), KEY.reducesinkkey6 (type: tinyint), KEY.reducesinkkey7 (type: tinyint), KEY.reducesinkkey8 (type: double), KEY.reducesinkkey9 (type: double), KEY.reducesinkkey10 (type: double), KEY.reducesinkkey11 (type: float), KEY.reducesinkkey12 (type: double), KEY.reducesinkkey10 (type: double), KEY.reducesinkkey14 (type: double), KEY.reducesinkkey15 (type: decimal(7,3)), KEY.reducesinkkey16 (type: double), KEY.reducesinkkey17 (type: double), KEY.reducesinkkey18 (type: float), KEY.reducesinkkey19 (type: double), KEY.reducesinkkey20 (type: tinyint) @@ -312,46 +287,46 @@ LIMIT 40 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### -NULL -55 1969-12-31 16:00:11.38 -55.0 NULL 55 -55 0 -55.0 -0.0 55.0 -4375.415 0.0 55.0 0.0 -10.175 -55.0 0.47781818181818186 -55.0 0.0 -55 -NULL -55 1969-12-31 16:00:11.751 -55.0 NULL 55 -55 0 -55.0 -0.0 55.0 -4375.415 0.0 55.0 0.0 -10.175 -55.0 0.47781818181818186 -55.0 0.0 -55 -NULL -56 1969-12-31 16:00:13.602 -56.0 NULL 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 -NULL -56 1969-12-31 16:00:13.958 -56.0 NULL 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 -NULL -56 1969-12-31 16:00:15.038 -56.0 NULL 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 -NULL -57 1969-12-31 16:00:11.451 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 -NULL -57 1969-12-31 16:00:11.883 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 -NULL -57 1969-12-31 16:00:12.626 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 -NULL -57 1969-12-31 16:00:13.578 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 -NULL -57 1969-12-31 16:00:15.39 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 -NULL -58 1969-12-31 16:00:12.065 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 -NULL -58 1969-12-31 16:00:12.683 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 -NULL -58 1969-12-31 16:00:12.948 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 -NULL -58 1969-12-31 16:00:14.066 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 
-58 -NULL -58 1969-12-31 16:00:15.658 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 -NULL -59 1969-12-31 16:00:12.008 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 -NULL -59 1969-12-31 16:00:13.15 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 -NULL -59 1969-12-31 16:00:13.625 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 -NULL -59 1969-12-31 16:00:15.296 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 -NULL -59 1969-12-31 16:00:15.861 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 -NULL -60 1969-12-31 16:00:11.504 -60.0 NULL 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 -NULL -60 1969-12-31 16:00:11.641 -60.0 NULL 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 -NULL -60 1969-12-31 16:00:11.996 -60.0 NULL 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 -NULL -60 1969-12-31 16:00:12.779 -60.0 NULL 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 -NULL -61 1969-12-31 16:00:11.842 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:12.454 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:14.192 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:16.558 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -62 1969-12-31 16:00:12.388 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:12.591 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.154 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.247 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.517 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.965 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -63 1969-12-31 16:00:11.946 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:12.188 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:15.436 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -64 1969-12-31 16:00:11.912 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:12.339 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:13.274 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 
-64.0 0.0 -64 +true -55 1969-12-31 16:00:12.297 -55.0 1cGVWH7n1QU 55 -55 0 -55.0 -0.0 55.0 -4375.415 0.0 55.0 0.0 -10.175 -55.0 0.47781818181818186 -55.0 0.0 -55 +true -55 1969-12-31 16:00:13.15 -55.0 1cGVWH7n1QU 55 -55 0 -55.0 -0.0 55.0 -4375.415 0.0 55.0 0.0 -10.175 -55.0 0.47781818181818186 -55.0 0.0 -55 +true -56 1969-12-31 16:00:11.242 -56.0 cvLH6Eat2yFsyy7p 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 +true -56 1969-12-31 16:00:13.534 -56.0 1cGVWH7n1QU 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 +true -56 1969-12-31 16:00:14.038 -56.0 1cGVWH7n1QU 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 +true -56 1969-12-31 16:00:14.689 -56.0 cvLH6Eat2yFsyy7p 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 +true -56 1969-12-31 16:00:16.37 -56.0 cvLH6Eat2yFsyy7p 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 +true -57 1969-12-31 16:00:11.534 -57.0 cvLH6Eat2yFsyy7p 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 +true -57 1969-12-31 16:00:13.365 -57.0 1cGVWH7n1QU 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 +true -57 1969-12-31 16:00:14.225 -57.0 821UdmGbkEf4j 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 +true -58 1969-12-31 16:00:12.918 -58.0 cvLH6Eat2yFsyy7p 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 +true -58 1969-12-31 16:00:13.209 -58.0 cvLH6Eat2yFsyy7p 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 +true -58 1969-12-31 16:00:14.933 -58.0 cvLH6Eat2yFsyy7p 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 +true -59 1969-12-31 16:00:11.065 -59.0 821UdmGbkEf4j 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:11.109 -59.0 1cGVWH7n1QU 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:11.231 -59.0 821UdmGbkEf4j 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:11.758 -59.0 821UdmGbkEf4j 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:12.227 -59.0 cvLH6Eat2yFsyy7p 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:15.242 -59.0 821UdmGbkEf4j 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:15.278 -59.0 cvLH6Eat2yFsyy7p 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:16.069 -59.0 cvLH6Eat2yFsyy7p 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:16.125 -59.0 cvLH6Eat2yFsyy7p 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -60 1969-12-31 16:00:11.849 -60.0 1cGVWH7n1QU 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -60 1969-12-31 16:00:12.223 -60.0 1cGVWH7n1QU 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 
0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -60 1969-12-31 16:00:12.291 -60.0 821UdmGbkEf4j 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -60 1969-12-31 16:00:13.567 -60.0 821UdmGbkEf4j 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -60 1969-12-31 16:00:15.188 -60.0 cvLH6Eat2yFsyy7p 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -60 1969-12-31 16:00:16.165 -60.0 cvLH6Eat2yFsyy7p 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -61 1969-12-31 16:00:12.045 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:12.75 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:15.325 -61.0 821UdmGbkEf4j 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:15.694 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -62 1969-12-31 16:00:13.677 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:14.872 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:15.153 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -63 1969-12-31 16:00:13.752 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:14.899 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:15.827 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -64 1969-12-31 16:00:11.952 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:12.857 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, ctinyint, @@ -641,43 +616,43 @@ LIMIT 40 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### -NULL -61 1969-12-31 16:00:00.142 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:02.698 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:03.049 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:04.165 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:04.977 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -62 1969-12-31 16:00:00.037 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:01.22 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 
-62 -NULL -62 1969-12-31 16:00:01.515 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:01.734 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:02.373 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:03.85 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:08.198 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:09.025 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:09.889 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:10.069 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:10.225 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:10.485 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:12.388 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:12.591 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.154 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.247 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.517 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.965 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -63 1969-12-31 16:00:01.843 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:03.552 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:06.852 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:07.375 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:10.205 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:11.946 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:12.188 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:15.436 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -64 1969-12-31 16:00:00.199 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:00.29 -64.0 NULL 64 -64 0 
-64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:01.785 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:03.944 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:05.997 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:10.858 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:11.912 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:12.339 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:13.274 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -61 1969-12-31 16:00:00.554 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:02.339 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:02.497 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:03.742 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:07.538 -61.0 821UdmGbkEf4j 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:09.809 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:10.713 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:12.045 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:12.75 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -62 1969-12-31 16:00:00.337 -62.0 1cGVWH7n1QU 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:00.659 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:00.684 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:01.419 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:02.123 -62.0 1cGVWH7n1QU 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:02.922 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:04.978 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:07.756 -62.0 1cGVWH7n1QU 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 
16:00:07.847 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:07.903 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:13.677 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:14.872 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:15.153 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -63 1969-12-31 16:00:05.654 -63.0 821UdmGbkEf4j 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:07.623 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:09.14 -63.0 821UdmGbkEf4j 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:13.752 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:14.899 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:15.827 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -64 1969-12-31 15:59:58.959 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:00.013 -64.0 1cGVWH7n1QU 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:00.172 -64.0 1cGVWH7n1QU 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:00.631 -64.0 1cGVWH7n1QU 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:01.305 -64.0 cvLH6Eat2yFsyy7p 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:01.79 -64.0 1cGVWH7n1QU 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:02.496 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:03.088 -64.0 cvLH6Eat2yFsyy7p 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:04.662 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:10.273 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:11.952 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:12.857 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 diff --git ql/src/test/results/clientpositive/spark/parquet_vectorization_14.q.out ql/src/test/results/clientpositive/spark/parquet_vectorization_14.q.out index ca43d13c00..c6d8f3b57e 100644 --- 
ql/src/test/results/clientpositive/spark/parquet_vectorization_14.q.out +++ ql/src/test/results/clientpositive/spark/parquet_vectorization_14.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT ctimestamp1, cfloat, cstring1, @@ -31,7 +31,7 @@ WHERE (((ctinyint <= cbigint) GROUP BY ctimestamp1, cfloat, cstring1, cboolean1, cdouble ORDER BY cstring1, cfloat, cdouble, ctimestamp1 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT ctimestamp1, cfloat, cstring1, @@ -88,7 +88,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -125,10 +124,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: float), _col2 (type: double), _col3 (type: timestamp), _col4 (type: boolean) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2, 3, 4] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [5, 6, 7, 8, 9, 10, 11] Statistics: Num rows: 606 Data size: 7272 Basic stats: COMPLETE Column stats: NONE value expressions: _col5 (type: double), _col6 (type: double), _col7 (type: bigint), _col8 (type: float), _col9 (type: double), _col10 (type: double), _col11 (type: bigint) Execution mode: vectorized @@ -141,27 +138,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 2, 3, 4, 5, 6, 8, 9, 10] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double, double, double, double] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aaaaa - reduceColumnSortOrder: +++++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - dataColumns: KEY._col0:string, KEY._col1:float, KEY._col2:double, KEY._col3:timestamp, KEY._col4:boolean, VALUE._col0:double, VALUE._col1:double, VALUE._col2:bigint, VALUE._col3:float, VALUE._col4:double, VALUE._col5:double, VALUE._col6:bigint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0), sum(VALUE._col1), count(VALUE._col2), max(VALUE._col3), sum(VALUE._col4), sum(VALUE._col5), count(VALUE._col6) @@ -191,10 +175,8 @@ STAGE PLANS: sort order: ++++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2, 3] native: true nativeConditionsMet: 
hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [4, 12, 14, 13, 15, 8, 19, 20, 21, 22, 11, 24, 25, 23, 29, 28, 31, 34] Statistics: Num rows: 303 Data size: 3636 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: boolean), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: float), _col9 (type: float), _col10 (type: float), _col11 (type: float), _col12 (type: double), _col13 (type: double), _col14 (type: bigint), _col15 (type: double), _col16 (type: double), _col17 (type: double), _col18 (type: double), _col19 (type: double), _col20 (type: double), _col21 (type: double) Reducer 3 @@ -202,16 +184,9 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aaaa - reduceColumnSortOrder: ++++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 22 - dataColumns: KEY.reducesinkkey0:string, KEY.reducesinkkey1:float, KEY.reducesinkkey2:double, KEY.reducesinkkey3:timestamp, VALUE._col0:boolean, VALUE._col1:double, VALUE._col2:double, VALUE._col3:double, VALUE._col4:float, VALUE._col5:float, VALUE._col6:float, VALUE._col7:float, VALUE._col8:double, VALUE._col9:double, VALUE._col10:bigint, VALUE._col11:double, VALUE._col12:double, VALUE._col13:double, VALUE._col14:double, VALUE._col15:double, VALUE._col16:double, VALUE._col17:double - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey3 (type: timestamp), KEY.reducesinkkey1 (type: float), KEY.reducesinkkey0 (type: string), VALUE._col0 (type: boolean), KEY.reducesinkkey2 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: double), VALUE._col3 (type: double), VALUE._col4 (type: float), VALUE._col5 (type: float), VALUE._col6 (type: float), VALUE._col7 (type: float), VALUE._col8 (type: double), VALUE._col9 (type: double), VALUE._col10 (type: bigint), VALUE._col11 (type: double), VALUE._col12 (type: double), VALUE._col13 (type: double), VALUE._col14 (type: double), VALUE._col15 (type: double), VALUE._col16 (type: double), VALUE._col17 (type: double) diff --git ql/src/test/results/clientpositive/spark/parquet_vectorization_15.q.out ql/src/test/results/clientpositive/spark/parquet_vectorization_15.q.out index 8497a7e541..51bf4e064b 100644 --- ql/src/test/results/clientpositive/spark/parquet_vectorization_15.q.out +++ ql/src/test/results/clientpositive/spark/parquet_vectorization_15.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cfloat, cboolean1, cdouble, @@ -29,7 +29,7 @@ WHERE (((cstring2 LIKE '%ss%') GROUP BY cfloat, cboolean1, cdouble, cstring1, ctinyint, cint, ctimestamp1 ORDER BY cfloat, cboolean1, cdouble, cstring1, ctinyint, cint, ctimestamp1 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cfloat, cboolean1, cdouble, @@ -84,7 +84,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 
4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -121,10 +120,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: float), _col1 (type: boolean), _col2 (type: double), _col3 (type: string), _col4 (type: tinyint), _col5 (type: int), _col6 (type: timestamp) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2, 3, 4, 5, 6] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [7, 8, 9, 10, 11, 12, 13, 14, 15, 16] Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE value expressions: _col7 (type: double), _col8 (type: double), _col9 (type: bigint), _col10 (type: double), _col11 (type: double), _col12 (type: double), _col13 (type: bigint), _col14 (type: double), _col15 (type: double), _col16 (type: bigint) Execution mode: vectorized @@ -137,12 +134,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 2, 4, 5, 6, 7, 8, 10] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double, double, double, double, double, double, double] Reducer 2 Reduce Vectorization: enabled: false diff --git ql/src/test/results/clientpositive/spark/parquet_vectorization_16.q.out ql/src/test/results/clientpositive/spark/parquet_vectorization_16.q.out index a85f4d32ab..c8ad650a89 100644 --- ql/src/test/results/clientpositive/spark/parquet_vectorization_16.q.out +++ ql/src/test/results/clientpositive/spark/parquet_vectorization_16.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cstring1, cdouble, ctimestamp1, @@ -18,7 +18,7 @@ WHERE ((cstring2 LIKE '%b%') OR (cstring1 < 'a'))) GROUP BY cstring1, cdouble, ctimestamp1 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cstring1, cdouble, ctimestamp1, @@ -61,7 +61,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -98,10 +97,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: double), _col2 (type: timestamp) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for 
keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3, 4, 5, 6] Statistics: Num rows: 4096 Data size: 49152 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint), _col4 (type: double), _col5 (type: double), _col6 (type: double) Execution mode: vectorized @@ -114,27 +111,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [5, 6, 7, 8] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aaa - reduceColumnSortOrder: +++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 7 - dataColumns: KEY._col0:string, KEY._col1:double, KEY._col2:timestamp, VALUE._col0:bigint, VALUE._col1:double, VALUE._col2:double, VALUE._col3:double - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: count(VALUE._col0), sum(VALUE._col1), sum(VALUE._col2), min(VALUE._col3) diff --git ql/src/test/results/clientpositive/spark/parquet_vectorization_17.q.out ql/src/test/results/clientpositive/spark/parquet_vectorization_17.q.out index 4ee2961cc2..a445b44622 100644 --- ql/src/test/results/clientpositive/spark/parquet_vectorization_17.q.out +++ ql/src/test/results/clientpositive/spark/parquet_vectorization_17.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cfloat, cstring1, cint, @@ -22,7 +22,7 @@ WHERE (((cbigint > -23) OR (cfloat = cdouble)))) ORDER BY cbigint, cfloat PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cfloat, cstring1, cint, @@ -69,7 +69,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -91,10 +90,8 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3, 4] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [6, 2, 8, 5, 15, 16, 14, 17, 19, 20, 22, 18] Statistics: Num rows: 4096 Data size: 49152 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string), _col2 (type: int), _col3 (type: timestamp), _col4 (type: double), _col6 (type: double), _col7 (type: bigint), _col8 (type: double), _col9 (type: double), _col10 (type: double), _col11 (type: double), _col12 (type: decimal(11,4)), _col13 (type: double) Execution 
mode: vectorized @@ -107,27 +104,14 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 2, 3, 4, 5, 6, 8] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [decimal(13,3), double, double, bigint, double, double, double, double, decimal(19,0), decimal(11,4), double] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 14 - dataColumns: KEY.reducesinkkey0:bigint, KEY.reducesinkkey1:float, VALUE._col0:string, VALUE._col1:int, VALUE._col2:timestamp, VALUE._col3:double, VALUE._col4:double, VALUE._col5:bigint, VALUE._col6:double, VALUE._col7:double, VALUE._col8:double, VALUE._col9:double, VALUE._col10:decimal(11,4), VALUE._col11:double - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey1 (type: float), VALUE._col0 (type: string), VALUE._col1 (type: int), VALUE._col2 (type: timestamp), VALUE._col3 (type: double), KEY.reducesinkkey0 (type: bigint), VALUE._col4 (type: double), VALUE._col5 (type: bigint), VALUE._col6 (type: double), VALUE._col7 (type: double), VALUE._col8 (type: double), VALUE._col9 (type: double), VALUE._col10 (type: decimal(11,4)), VALUE._col11 (type: double) diff --git ql/src/test/results/clientpositive/spark/parquet_vectorization_2.q.out ql/src/test/results/clientpositive/spark/parquet_vectorization_2.q.out index 390b1df0c4..5323ab3156 100644 --- ql/src/test/results/clientpositive/spark/parquet_vectorization_2.q.out +++ ql/src/test/results/clientpositive/spark/parquet_vectorization_2.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT AVG(csmallint), (AVG(csmallint) % -563), (AVG(csmallint) + 762), @@ -21,7 +21,7 @@ WHERE (((ctimestamp1 < ctimestamp2) AND ((-10669 != ctimestamp2) OR (359 > cint)))) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT AVG(csmallint), (AVG(csmallint) % -563), (AVG(csmallint) + 762), @@ -67,7 +67,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -100,10 +99,8 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 
8, 9] Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: double), _col3 (type: double), _col4 (type: double), _col5 (type: bigint), _col6 (type: bigint), _col7 (type: tinyint), _col8 (type: double), _col9 (type: bigint) Execution mode: vectorized @@ -116,27 +113,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 2, 3, 4, 5, 7, 8, 9] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double, double, double, double] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: - reduceColumnSortOrder: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 10 - dataColumns: VALUE._col0:bigint, VALUE._col1:bigint, VALUE._col2:double, VALUE._col3:double, VALUE._col4:double, VALUE._col5:bigint, VALUE._col6:bigint, VALUE._col7:tinyint, VALUE._col8:double, VALUE._col9:bigint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0), count(VALUE._col1), sum(VALUE._col2), sum(VALUE._col3), sum(VALUE._col4), count(VALUE._col5), count(VALUE._col6), min(VALUE._col7), sum(VALUE._col8), count(VALUE._col9) diff --git ql/src/test/results/clientpositive/spark/parquet_vectorization_3.q.out ql/src/test/results/clientpositive/spark/parquet_vectorization_3.q.out index 140f009816..62dd3f5011 100644 --- ql/src/test/results/clientpositive/spark/parquet_vectorization_3.q.out +++ ql/src/test/results/clientpositive/spark/parquet_vectorization_3.q.out @@ -1,5 +1,5 @@ WARNING: Comparing a bigint and a double may result in a loss of precision. 
-PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT STDDEV_SAMP(csmallint), (STDDEV_SAMP(csmallint) - 10.175), STDDEV_POP(ctinyint), @@ -24,7 +24,7 @@ WHERE (((cint <= cfloat) AND ((79.553 <= csmallint) AND (ctimestamp1 > ctimestamp2)))) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT STDDEV_SAMP(csmallint), (STDDEV_SAMP(csmallint) - 10.175), STDDEV_POP(ctinyint), @@ -72,7 +72,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -105,10 +104,8 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13] Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: double), _col1 (type: double), _col2 (type: bigint), _col3 (type: double), _col4 (type: double), _col5 (type: bigint), _col6 (type: double), _col7 (type: double), _col8 (type: bigint), _col9 (type: double), _col10 (type: bigint), _col11 (type: bigint), _col12 (type: double), _col13 (type: double) Execution mode: vectorized @@ -121,27 +118,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 2, 3, 4, 5, 8, 9] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double, decimal(22,3), decimal(8,3), double, double, double, double, double, double, double, double] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: - reduceColumnSortOrder: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 14 - dataColumns: VALUE._col0:double, VALUE._col1:double, VALUE._col2:bigint, VALUE._col3:double, VALUE._col4:double, VALUE._col5:bigint, VALUE._col6:double, VALUE._col7:double, VALUE._col8:bigint, VALUE._col9:double, VALUE._col10:bigint, VALUE._col11:bigint, VALUE._col12:double, VALUE._col13:double - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0), sum(VALUE._col1), count(VALUE._col2), sum(VALUE._col3), sum(VALUE._col4), count(VALUE._col5), sum(VALUE._col6), sum(VALUE._col7), count(VALUE._col8), sum(VALUE._col9), sum(VALUE._col10), count(VALUE._col11), sum(VALUE._col12), sum(VALUE._col13) diff --git 
ql/src/test/results/clientpositive/spark/parquet_vectorization_4.q.out ql/src/test/results/clientpositive/spark/parquet_vectorization_4.q.out index adf6d603b5..0f544a4b68 100644 --- ql/src/test/results/clientpositive/spark/parquet_vectorization_4.q.out +++ ql/src/test/results/clientpositive/spark/parquet_vectorization_4.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(cint), (SUM(cint) * -563), (-3728 + SUM(cint)), @@ -21,7 +21,7 @@ WHERE (((csmallint >= cint) AND ((ctinyint != cbigint) OR (-3728 >= cdouble)))) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(cint), (SUM(cint) * -563), (-3728 + SUM(cint)), @@ -67,7 +67,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -100,10 +99,8 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3, 4] Statistics: Num rows: 1 Data size: 36 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint), _col1 (type: double), _col2 (type: double), _col3 (type: bigint), _col4 (type: tinyint) Execution mode: vectorized @@ -116,27 +113,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 2, 3, 5] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: - reduceColumnSortOrder: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 5 - dataColumns: VALUE._col0:bigint, VALUE._col1:double, VALUE._col2:double, VALUE._col3:bigint, VALUE._col4:tinyint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0), sum(VALUE._col1), sum(VALUE._col2), count(VALUE._col3), min(VALUE._col4) diff --git ql/src/test/results/clientpositive/spark/parquet_vectorization_5.q.out ql/src/test/results/clientpositive/spark/parquet_vectorization_5.q.out index 5f2e2ca298..40205d2dfe 100644 --- ql/src/test/results/clientpositive/spark/parquet_vectorization_5.q.out +++ ql/src/test/results/clientpositive/spark/parquet_vectorization_5.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT 
MAX(csmallint), (MAX(csmallint) * -75), COUNT(*), @@ -18,7 +18,7 @@ WHERE (((cboolean2 IS NOT NULL) AND ((ctimestamp2 IS NOT NULL) AND (cstring2 LIKE 'a')))) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT MAX(csmallint), (MAX(csmallint) * -75), COUNT(*), @@ -61,7 +61,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -93,10 +92,8 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3, 4] Statistics: Num rows: 1 Data size: 28 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: smallint), _col1 (type: bigint), _col2 (type: smallint), _col3 (type: bigint), _col4 (type: tinyint) Execution mode: vectorized @@ -109,27 +106,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 2, 5, 6, 7, 9, 11] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: - reduceColumnSortOrder: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 5 - dataColumns: VALUE._col0:smallint, VALUE._col1:bigint, VALUE._col2:smallint, VALUE._col3:bigint, VALUE._col4:tinyint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: max(VALUE._col0), count(VALUE._col1), min(VALUE._col2), sum(VALUE._col3), max(VALUE._col4) diff --git ql/src/test/results/clientpositive/spark/parquet_vectorization_6.q.out ql/src/test/results/clientpositive/spark/parquet_vectorization_6.q.out index 5b7f309b56..362d19c39b 100644 --- ql/src/test/results/clientpositive/spark/parquet_vectorization_6.q.out +++ ql/src/test/results/clientpositive/spark/parquet_vectorization_6.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, cfloat, cstring1, @@ -19,7 +19,7 @@ WHERE ((ctinyint != 0) AND ((cstring2 LIKE '%a') OR (cfloat <= -257)))))) PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, cfloat, cstring1, @@ -61,7 +61,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - 
vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -98,12 +97,6 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 2, 3, 4, 5, 6, 7, 10, 11] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [bigint, bigint, double, double, double, bigint, double, bigint, bigint, bigint] Stage: Stage-0 Fetch Operator diff --git ql/src/test/results/clientpositive/spark/parquet_vectorization_7.q.out ql/src/test/results/clientpositive/spark/parquet_vectorization_7.q.out index 67cd1fe7f5..e5d63451e7 100644 --- ql/src/test/results/clientpositive/spark/parquet_vectorization_7.q.out +++ ql/src/test/results/clientpositive/spark/parquet_vectorization_7.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, cbigint, csmallint, @@ -25,7 +25,7 @@ WHERE ((ctinyint != 0) ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9 LIMIT 25 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, cbigint, csmallint, @@ -75,7 +75,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -97,10 +96,8 @@ STAGE PLANS: sort order: +++++++++++++++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [10, 3, 1, 0, 8, 6, 14, 15, 16, 17, 19, 20, 18, 21, 23] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 5461 Data size: 65532 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 Execution mode: vectorized @@ -113,27 +110,14 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 2, 3, 5, 6, 7, 8, 9, 10] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double, bigint, bigint, bigint, bigint, bigint, bigint, bigint, bigint, bigint, bigint] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: 
hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aaaaaaaaaaaaaaa - reduceColumnSortOrder: +++++++++++++++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 15 - dataColumns: KEY.reducesinkkey0:boolean, KEY.reducesinkkey1:bigint, KEY.reducesinkkey2:smallint, KEY.reducesinkkey3:tinyint, KEY.reducesinkkey4:timestamp, KEY.reducesinkkey5:string, KEY.reducesinkkey6:bigint, KEY.reducesinkkey7:int, KEY.reducesinkkey8:smallint, KEY.reducesinkkey9:tinyint, KEY.reducesinkkey10:int, KEY.reducesinkkey11:bigint, KEY.reducesinkkey12:int, KEY.reducesinkkey13:tinyint, KEY.reducesinkkey14:tinyint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: boolean), KEY.reducesinkkey1 (type: bigint), KEY.reducesinkkey2 (type: smallint), KEY.reducesinkkey3 (type: tinyint), KEY.reducesinkkey4 (type: timestamp), KEY.reducesinkkey5 (type: string), KEY.reducesinkkey6 (type: bigint), KEY.reducesinkkey7 (type: int), KEY.reducesinkkey8 (type: smallint), KEY.reducesinkkey9 (type: tinyint), KEY.reducesinkkey10 (type: int), KEY.reducesinkkey11 (type: bigint), KEY.reducesinkkey12 (type: int), KEY.reducesinkkey9 (type: tinyint), KEY.reducesinkkey14 (type: tinyint) @@ -222,31 +206,31 @@ LIMIT 25 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### -NULL -2118149242 -7196 56 1969-12-31 15:59:50.462 NULL -4236298484 0 7196 -56 -39 -15242201945432 NULL -56 0 -NULL -2121399625 -7196 27 1969-12-31 15:59:50.046 NULL -4242799250 0 7196 -27 -10 -15265591701500 NULL -27 0 -NULL -2124802690 -7196 -6 1969-12-31 15:59:57.92 NULL -4249605380 0 7196 6 23 -15290080157240 NULL 6 0 -NULL -2128720310 -7196 -52 1969-12-31 15:59:45.978 NULL -4257440620 0 7196 52 69 -15318271350760 NULL 52 0 -NULL -2132232110 -200 60 1969-12-31 15:59:47.019 NULL -4264464220 -200 200 -60 -43 -426446422000 NULL -60 0 -NULL -2132536965 -7196 9 1969-12-31 15:59:46 NULL -4265073930 0 7196 -9 8 -15345736000140 NULL -9 0 -NULL -2135141157 -7196 50 1969-12-31 15:59:50.192 NULL -4270282314 0 7196 -50 -33 -15364475765772 NULL -50 0 -NULL -2137537679 -7196 -25 1969-12-31 15:59:50.136 NULL -4275075358 0 7196 25 42 -15381721138084 NULL 25 0 -NULL -2145481991 -7196 56 1969-12-31 15:59:55.667 NULL -4290963982 0 7196 -56 -39 -15438888407236 NULL -56 0 -NULL NULL -200 -36 1969-12-31 15:59:57.241 NULL NULL -200 200 36 53 NULL NULL 36 0 -NULL NULL -200 -43 1969-12-31 15:59:53.783 NULL NULL -200 200 43 60 NULL NULL 43 0 -NULL NULL -200 -58 1969-12-31 15:59:51.115 NULL NULL -200 200 58 75 NULL NULL 58 0 -NULL NULL -200 22 1969-12-31 15:59:50.109 NULL NULL -200 200 -22 -5 NULL NULL -22 0 -NULL NULL -200 3 1969-12-31 15:59:50.489 NULL NULL -200 200 -3 14 NULL NULL -3 0 -NULL NULL -200 43 1969-12-31 15:59:57.003 NULL NULL -200 200 -43 -26 NULL NULL -43 0 -NULL NULL -200 53 1969-12-31 15:59:49.46 NULL NULL -200 200 -53 -36 NULL NULL -53 0 -NULL NULL -200 9 1969-12-31 15:59:44.108 NULL NULL -200 200 -9 8 NULL NULL -9 0 -NULL NULL -7196 -38 1969-12-31 15:59:53.503 NULL NULL 0 7196 38 55 NULL NULL 38 0 -NULL NULL -7196 -49 1969-12-31 15:59:51.009 NULL NULL 0 7196 49 66 NULL NULL 49 0 -NULL NULL -7196 -49 1969-12-31 15:59:52.052 NULL NULL 0 7196 49 66 NULL NULL 49 0 -NULL NULL -7196 -50 1969-12-31 15:59:52.424 NULL NULL 0 7196 50 67 NULL NULL 50 0 -NULL NULL -7196 -61 1969-12-31 15:59:44.823 NULL NULL 0 7196 61 78 NULL NULL 61 0 -NULL NULL -7196 
1 1969-12-31 15:59:48.361 NULL NULL 0 7196 -1 16 NULL NULL -1 0 -NULL NULL -7196 14 1969-12-31 15:59:50.291 NULL NULL 0 7196 -14 3 NULL NULL -14 0 -NULL NULL -7196 22 1969-12-31 15:59:52.699 NULL NULL 0 7196 -22 -5 NULL NULL -22 0 +true NULL -15892 29 1969-12-31 15:59:57.937 821UdmGbkEf4j NULL -215 15892 -29 -12 NULL 171 -29 0 +true NULL -15899 50 1969-12-31 15:59:46.926 821UdmGbkEf4j NULL -222 15899 -50 -33 NULL 10210 -50 0 +true NULL -15903 -2 1969-12-31 15:59:46.371 cvLH6Eat2yFsyy7p NULL -226 15903 2 19 NULL 14465 2 0 +true NULL -15920 -64 1969-12-31 15:59:51.859 cvLH6Eat2yFsyy7p NULL -243 15920 64 81 NULL 6687 64 0 +true NULL -15922 -17 1969-12-31 15:59:46.164 821UdmGbkEf4j NULL -245 15922 17 34 NULL 10851 17 0 +true NULL -15923 49 1969-12-31 15:59:47.323 cvLH6Eat2yFsyy7p NULL -246 15923 -49 -32 NULL 2628 -49 0 +true NULL -15935 -6 1969-12-31 15:59:45.859 1cGVWH7n1QU NULL -1 15935 6 23 NULL 12046 6 0 +true NULL -15948 31 1969-12-31 15:59:47.577 821UdmGbkEf4j NULL -14 15948 -31 -14 NULL 7799 -31 0 +true NULL -15948 6 1969-12-31 15:59:49.269 1cGVWH7n1QU NULL -14 15948 -6 11 NULL 12436 -6 0 +true NULL -15980 -6 1969-12-31 15:59:54.84 1cGVWH7n1QU NULL -46 15980 6 23 NULL 14836 6 0 +true NULL -15999 4 1969-12-31 15:59:46.491 1cGVWH7n1QU NULL -65 15999 -4 13 NULL 1231 -4 0 +true NULL -16017 -21 1969-12-31 15:59:44.02 821UdmGbkEf4j NULL -83 16017 21 38 NULL 2282 21 0 +true NULL -16025 -42 1969-12-31 15:59:54.534 cvLH6Eat2yFsyy7p NULL -91 16025 42 59 NULL 14242 42 0 +true NULL -16036 -15 1969-12-31 15:59:58.681 1cGVWH7n1QU NULL -102 16036 15 32 NULL 7928 15 0 +true NULL -16059 -35 1969-12-31 15:59:53.038 821UdmGbkEf4j NULL -125 16059 35 52 NULL 12437 35 0 +true NULL -16076 59 1969-12-31 15:59:55.023 821UdmGbkEf4j NULL -142 16076 -59 -42 NULL 7907 -59 0 +true NULL -16122 50 1969-12-31 15:59:51.608 1cGVWH7n1QU NULL -188 16122 -50 -33 NULL 1828 -50 0 +true NULL -16123 -20 1969-12-31 15:59:51.177 1cGVWH7n1QU NULL -189 16123 20 37 NULL 2217 20 0 +true NULL -16153 35 1969-12-31 15:59:52.036 1cGVWH7n1QU NULL -219 16153 -35 -18 NULL 14817 -35 0 +true NULL -16169 5 1969-12-31 15:59:45.059 1cGVWH7n1QU NULL -235 16169 -5 12 NULL 6104 -5 0 +true NULL -16207 -4 1969-12-31 15:59:45.956 cvLH6Eat2yFsyy7p NULL -16 16207 4 21 NULL 8290 4 0 +true NULL -16221 -12 1969-12-31 15:59:45.877 1cGVWH7n1QU NULL -30 16221 12 29 NULL 1378 12 0 +true NULL -16227 2 1969-12-31 15:59:44.065 821UdmGbkEf4j NULL -36 16227 -2 15 NULL 9761 -2 0 +true NULL -16305 3 1969-12-31 15:59:43.878 1cGVWH7n1QU NULL -114 16305 -3 14 NULL 8491 -3 0 +true NULL -16339 15 1969-12-31 15:59:53.966 821UdmGbkEf4j NULL -148 16339 -15 2 NULL 12588 -15 0 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, cbigint, diff --git ql/src/test/results/clientpositive/spark/parquet_vectorization_8.q.out ql/src/test/results/clientpositive/spark/parquet_vectorization_8.q.out index 8de1d3b6e8..b10b550009 100644 --- ql/src/test/results/clientpositive/spark/parquet_vectorization_8.q.out +++ ql/src/test/results/clientpositive/spark/parquet_vectorization_8.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT ctimestamp1, cdouble, cboolean1, @@ -23,7 +23,7 @@ WHERE (((cstring2 IS NOT NULL) ORDER BY ctimestamp1, cdouble, cboolean1, cstring1, cfloat, c1, c2, c3, c4, c5, c6, c7, c8, c9 LIMIT 20 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT ctimestamp1, cdouble, cboolean1, @@ -71,7 +71,6 @@ STAGE PLANS: 
Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -93,10 +92,8 @@ STAGE PLANS: sort order: ++++++++++++++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [8, 5, 10, 6, 4, 13, 14, 15, 17, 19, 16, 18, 20, 22] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 Execution mode: vectorized @@ -109,27 +106,14 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [2, 3, 4, 5, 6, 7, 8, 9, 10] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double, double, double, double, double, double, double, double, double, double, double] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aaaaaaaaaaaaaa - reduceColumnSortOrder: ++++++++++++++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 14 - dataColumns: KEY.reducesinkkey0:timestamp, KEY.reducesinkkey1:double, KEY.reducesinkkey2:boolean, KEY.reducesinkkey3:string, KEY.reducesinkkey4:float, KEY.reducesinkkey5:double, KEY.reducesinkkey6:double, KEY.reducesinkkey7:double, KEY.reducesinkkey8:float, KEY.reducesinkkey9:double, KEY.reducesinkkey10:double, KEY.reducesinkkey11:float, KEY.reducesinkkey12:float, KEY.reducesinkkey13:double - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: timestamp), KEY.reducesinkkey1 (type: double), KEY.reducesinkkey2 (type: boolean), KEY.reducesinkkey3 (type: string), KEY.reducesinkkey4 (type: float), KEY.reducesinkkey5 (type: double), KEY.reducesinkkey6 (type: double), KEY.reducesinkkey7 (type: double), KEY.reducesinkkey8 (type: float), KEY.reducesinkkey9 (type: double), KEY.reducesinkkey5 (type: double), KEY.reducesinkkey11 (type: float), KEY.reducesinkkey12 (type: float), KEY.reducesinkkey13 (type: double) diff --git ql/src/test/results/clientpositive/spark/parquet_vectorization_9.q.out ql/src/test/results/clientpositive/spark/parquet_vectorization_9.q.out index a85f4d32ab..c8ad650a89 100644 --- ql/src/test/results/clientpositive/spark/parquet_vectorization_9.q.out +++ ql/src/test/results/clientpositive/spark/parquet_vectorization_9.q.out @@ -1,4 +1,4 @@ -PREHOOK: query: EXPLAIN VECTORIZATION DETAIL +PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cstring1, cdouble, ctimestamp1, @@ -18,7 +18,7 @@ WHERE 
((cstring2 LIKE '%b%') OR (cstring1 < 'a'))) GROUP BY cstring1, cdouble, ctimestamp1 PREHOOK: type: QUERY -POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL +POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cstring1, cdouble, ctimestamp1, @@ -61,7 +61,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -98,10 +97,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: double), _col2 (type: timestamp) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3, 4, 5, 6] Statistics: Num rows: 4096 Data size: 49152 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint), _col4 (type: double), _col5 (type: double), _col6 (type: double) Execution mode: vectorized @@ -114,27 +111,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [5, 6, 7, 8] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aaa - reduceColumnSortOrder: +++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 7 - dataColumns: KEY._col0:string, KEY._col1:double, KEY._col2:timestamp, VALUE._col0:bigint, VALUE._col1:double, VALUE._col2:double, VALUE._col3:double - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: count(VALUE._col0), sum(VALUE._col1), sum(VALUE._col2), min(VALUE._col3) diff --git ql/src/test/results/clientpositive/spark/parquet_vectorization_div0.q.out ql/src/test/results/clientpositive/spark/parquet_vectorization_div0.q.out index f448a3e8bb..7de4d38cd3 100644 --- ql/src/test/results/clientpositive/spark/parquet_vectorization_div0.q.out +++ ql/src/test/results/clientpositive/spark/parquet_vectorization_div0.q.out @@ -291,8 +291,8 @@ from alltypesparquet where cbigint > 0 and cbigint < 100000000 order by s1, s2 l POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### --985319 NULL -0.000001217879691754650 -985319 2.0297994862577501E-4 -0.000001217879691754650 +-985319 NULL -0.000001217879691754650 -63925 0.11256941728588189 -0.000018771998435666797 0 NULL NULL 0 NULL NULL @@ -517,8 +517,8 @@ POSTHOOK: Input: default@alltypesparquet -273.0 6028764.868131869 1.0 6028764.868131869 -0.01098901098901099 
-0.004395604395604396 -257.0 6404096.53307393 1.0 6404096.53307393 -0.011673151750972763 -0.004669260700389105 -250.0 6583411.236 1.0 6583411.236 -0.012 -0.0048 --247.0 NULL 1.0 NULL -0.012145748987854251 -0.004858299595141701 -247.0 -7546669.174089069 1.0 -7546669.174089069 -0.012145748987854251 -0.004858299595141701 +-247.0 NULL 1.0 NULL -0.012145748987854251 -0.004858299595141701 -246.0 NULL 1.0 NULL -0.012195121951219513 -0.004878048780487805 -237.0 NULL 1.0 NULL -0.012658227848101266 -0.005063291139240506 -236.0 NULL 1.0 NULL -0.012711864406779662 -0.005084745762711864 @@ -546,18 +546,18 @@ POSTHOOK: Input: default@alltypesparquet -132.0 NULL 1.0 NULL -0.022727272727272728 -0.00909090909090909 -129.0 1.2758548906976745E7 1.0 1.2758548906976745E7 -0.023255813953488372 -0.009302325581395349 -128.0 NULL 1.0 NULL -0.0234375 -0.009375 --126.0 NULL 1.0 NULL -0.023809523809523808 -0.009523809523809523 -126.0 -1.4793867349206349E7 1.0 -1.4793867349206349E7 -0.023809523809523808 -0.009523809523809523 +-126.0 NULL 1.0 NULL -0.023809523809523808 -0.009523809523809523 -116.0 NULL 1.0 NULL -0.02586206896551724 -0.010344827586206896 --113.0 NULL 1.0 NULL -0.02654867256637168 -0.010619469026548672 -113.0 -1.6495816690265486E7 1.0 -1.6495816690265486E7 -0.02654867256637168 -0.010619469026548672 +-113.0 NULL 1.0 NULL -0.02654867256637168 -0.010619469026548672 -96.0 NULL 1.0 NULL -0.03125 -0.012499999999999999 -94.0 -1.9830077510638297E7 1.0 -1.9830077510638297E7 -0.031914893617021274 -0.01276595744680851 -93.0 NULL 1.0 NULL -0.03225806451612903 -0.012903225806451613 -77.0 2.4513789038961038E7 1.0 2.4513789038961038E7 -0.03896103896103896 -0.015584415584415584 -69.0 2.735596747826087E7 1.0 2.735596747826087E7 -0.043478260869565216 -0.017391304347826087 --62.0 NULL 1.0 NULL -0.04838709677419355 -0.01935483870967742 -62.0 3.0444544451612905E7 1.0 3.0444544451612905E7 -0.04838709677419355 -0.01935483870967742 +-62.0 NULL 1.0 NULL -0.04838709677419355 -0.01935483870967742 -60.0 NULL 1.0 NULL -0.05 -0.02 -57.0 -3.27022330877193E7 1.0 -3.27022330877193E7 -0.05263157894736842 -0.021052631578947368 -49.0 3.35888328367347E7 1.0 3.35888328367347E7 -0.061224489795918366 -0.024489795918367346 diff --git ql/src/test/results/clientpositive/spark/parquet_vectorization_limit.q.out ql/src/test/results/clientpositive/spark/parquet_vectorization_limit.q.out index b2c0a64acb..4c6fbcaaac 100644 --- ql/src/test/results/clientpositive/spark/parquet_vectorization_limit.q.out +++ ql/src/test/results/clientpositive/spark/parquet_vectorization_limit.q.out @@ -72,10 +72,10 @@ POSTHOOK: Input: default@alltypesparquet -1887561756 10361.0 -1887561756 1839.0 -1887561756 9531.0 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain VECTORIZATION EXPRESSION select ctinyint,cdouble,csmallint from alltypesparquet where ctinyint is not null order by ctinyint,cdouble limit 20 PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain VECTORIZATION EXPRESSION select ctinyint,cdouble,csmallint from alltypesparquet where ctinyint is not null order by ctinyint,cdouble limit 20 POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -101,7 +101,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 
10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -122,10 +121,8 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 5] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.3 value expressions: _col2 (type: smallint) @@ -139,27 +136,14 @@ STAGE PLANS: allNative: true usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 1, 5] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 3 - dataColumns: KEY.reducesinkkey0:tinyint, KEY.reducesinkkey1:double, VALUE._col0:smallint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: tinyint), KEY.reducesinkkey1 (type: double), VALUE._col0 (type: smallint) @@ -220,10 +204,10 @@ POSTHOOK: Input: default@alltypesparquet -64 -7196.0 -7196 -64 -8080.0 -8080 -64 -9842.0 -9842 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain VECTORIZATION EXPRESSION select ctinyint,avg(cdouble + 1) from alltypesparquet group by ctinyint order by ctinyint limit 20 PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain VECTORIZATION EXPRESSION select ctinyint,avg(cdouble + 1) from alltypesparquet group by ctinyint order by ctinyint limit 20 POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -248,7 +232,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Select Operator expressions: ctinyint (type: tinyint), (cdouble + 1.0D) (type: double) outputColumnNames: _col0, _col1 @@ -278,11 +261,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: tinyint) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [1, 2] Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TopN 
Hash Memory Usage: 0.3 value expressions: _col1 (type: double), _col2 (type: bigint) @@ -296,27 +276,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 5] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [double] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 3 - dataColumns: KEY._col0:tinyint, VALUE._col0:double, VALUE._col1:bigint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0), count(VALUE._col1) @@ -372,6 +339,7 @@ POSTHOOK: query: select ctinyint,avg(cdouble + 1) from alltypesparquet group by POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### +-45 326.44444444444446 -46 3033.55 -47 -574.6428571428571 -48 1672.909090909091 @@ -391,11 +359,10 @@ POSTHOOK: Input: default@alltypesparquet -62 245.69387755102042 -63 2178.7272727272725 -64 373.52941176470586 -NULL 9370.0945309795 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain VECTORIZATION EXPRESSION select distinct(ctinyint) from alltypesparquet limit 20 PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain VECTORIZATION EXPRESSION select distinct(ctinyint) from alltypesparquet limit 20 POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -420,7 +387,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Select Operator expressions: ctinyint (type: tinyint) outputColumnNames: ctinyint @@ -447,10 +413,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: tinyint) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.3 Execution mode: vectorized @@ -463,27 +427,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: 
true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 1 - dataColumns: KEY._col0:tinyint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator Group By Vectorization: @@ -548,10 +499,10 @@ POSTHOOK: Input: default@alltypesparquet -63 -64 NULL -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain VECTORIZATION EXPRESSION select ctinyint, count(distinct(cdouble)) from alltypesparquet group by ctinyint order by ctinyint limit 20 PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain VECTORIZATION EXPRESSION select ctinyint, count(distinct(cdouble)) from alltypesparquet group by ctinyint order by ctinyint limit 20 POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -576,7 +527,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Select Operator expressions: ctinyint (type: tinyint), cdouble (type: double) outputColumnNames: ctinyint, cdouble @@ -603,11 +553,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: tinyint) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [] Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -619,27 +566,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 5] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY._col0:tinyint, KEY._col1:double - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator Group By Vectorization: @@ -698,6 +632,7 @@ POSTHOOK: query: select ctinyint, count(distinct(cdouble)) from alltypesparquet POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### +-45 24 -46 24 -47 22 -48 29 @@ -717,11 +652,10 @@ POSTHOOK: Input: default@alltypesparquet -62 27 -63 19 -64 24 -NULL 2932 -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain VECTORIZATION 
EXPRESSION select ctinyint,cdouble from alltypesparquet order by ctinyint limit 0 PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain VECTORIZATION EXPRESSION select ctinyint,cdouble from alltypesparquet order by ctinyint limit 0 POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -746,10 +680,10 @@ POSTHOOK: query: select ctinyint,cdouble from alltypesparquet order by ctinyint POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesparquet #### A masked pattern was here #### -PREHOOK: query: explain vectorization detail +PREHOOK: query: explain VECTORIZATION EXPRESSION select cdouble, sum(ctinyint) as sum from alltypesparquet where ctinyint is not null group by cdouble order by sum, cdouble limit 20 PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization detail +POSTHOOK: query: explain VECTORIZATION EXPRESSION select cdouble, sum(ctinyint) as sum from alltypesparquet where ctinyint is not null group by cdouble order by sum, cdouble limit 20 POSTHOOK: type: QUERY PLAN VECTORIZATION: @@ -776,7 +710,6 @@ STAGE PLANS: Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true - vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] Filter Operator Filter Vectorization: className: VectorFilterOperator @@ -804,10 +737,8 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: double) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1] Statistics: Num rows: 12288 Data size: 147456 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Execution mode: vectorized @@ -820,27 +751,14 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 12 - includeColumns: [0, 5] - dataColumns: ctinyint:tinyint, csmallint:smallint, cint:int, cbigint:bigint, cfloat:float, cdouble:double, cstring1:string, cstring2:string, ctimestamp1:timestamp, ctimestamp2:timestamp, cboolean1:boolean, cboolean2:boolean - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reducer 2 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: a - reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY._col0:double, VALUE._col0:bigint - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Group By Operator aggregations: sum(VALUE._col0) @@ -861,10 +779,8 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [1, 0] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for 
values IS true - valueColumnNums: [] Statistics: Num rows: 6144 Data size: 73728 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.3 Reducer 3 @@ -872,16 +788,9 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aa - reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false vectorized: true - rowBatchContext: - dataColumnCount: 2 - dataColumns: KEY.reducesinkkey0:bigint, KEY.reducesinkkey1:double - partitionColumnCount: 0 - scratchColumnTypeNames: [] Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey1 (type: double), KEY.reducesinkkey0 (type: bigint) diff --git ql/src/test/results/clientpositive/spark/parquet_vectorization_part_project.q.out ql/src/test/results/clientpositive/spark/parquet_vectorization_part_project.q.out index c2b34c8785..ec4ddb9f05 100644 --- ql/src/test/results/clientpositive/spark/parquet_vectorization_part_project.q.out +++ ql/src/test/results/clientpositive/spark/parquet_vectorization_part_project.q.out @@ -131,13 +131,13 @@ POSTHOOK: Input: default@alltypesparquet_part_n0 POSTHOOK: Input: default@alltypesparquet_part_n0@ds=2011 POSTHOOK: Input: default@alltypesparquet_part_n0@ds=2012 #### A masked pattern was here #### -NULL -NULL --15863.0 --15863.0 --14988.0 --14988.0 --14646.0 --14646.0 --14236.0 --14236.0 +-15990.0 +-15990.0 +-15918.0 +-15918.0 +-15890.0 +-15890.0 +-14305.0 +-14305.0 +-12514.0 +-12514.0 diff --git ql/src/test/results/clientpositive/spark/pcr.q.out ql/src/test/results/clientpositive/spark/pcr.q.out index ca0b222f62..db54ab4873 100644 --- ql/src/test/results/clientpositive/spark/pcr.q.out +++ ql/src/test/results/clientpositive/spark/pcr.q.out @@ -84,7 +84,7 @@ STAGE PLANS: Statistics: Num rows: 13 Data size: 104 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col2 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 13 Data size: 104 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -288,7 +288,7 @@ STAGE PLANS: Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -578,7 +578,7 @@ STAGE PLANS: Statistics: Num rows: 13 Data size: 104 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col2 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 13 Data size: 104 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -784,7 +784,7 @@ STAGE PLANS: Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col2 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -992,7 +992,7 @@ STAGE PLANS: Statistics: Num rows: 16 Data size: 128 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col2 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 16 Data size: 128 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -1259,7 +1259,7 @@ STAGE PLANS: Statistics: Num rows: 33 Data 
size: 264 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col2 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 33 Data size: 264 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -1530,7 +1530,7 @@ STAGE PLANS: Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -1715,7 +1715,7 @@ STAGE PLANS: Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -1944,7 +1944,7 @@ STAGE PLANS: Statistics: Num rows: 60 Data size: 480 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 60 Data size: 480 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -2244,7 +2244,7 @@ STAGE PLANS: Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) - null sort order: aaa + null sort order: zzz sort order: +++ Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -2600,7 +2600,7 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -2889,7 +2889,7 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -3028,7 +3028,7 @@ STAGE PLANS: Statistics: Num rows: 48 Data size: 384 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) - null sort order: aaa + null sort order: zzz sort order: +++ Statistics: Num rows: 48 Data size: 384 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -3365,7 +3365,7 @@ STAGE PLANS: Statistics: Num rows: 30 Data size: 240 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string) - null sort order: aaa + null sort order: zzz sort order: +++ Statistics: Num rows: 30 Data size: 240 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -4161,7 +4161,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -4304,7 +4304,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col2 (type: 
string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -4503,7 +4503,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col2 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/spark/ptf.q.out ql/src/test/results/clientpositive/spark/ptf.q.out index 40ac6a8b5a..98d60e5b7c 100644 --- ql/src/test/results/clientpositive/spark/ptf.q.out +++ ql/src/test/results/clientpositive/spark/ptf.q.out @@ -57,7 +57,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -84,7 +84,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -264,7 +264,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -290,7 +290,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -414,7 +414,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -536,7 +536,7 @@ STAGE PLANS: Partition table definition input alias: abc name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -563,7 +563,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -713,7 +713,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -739,7 +739,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -892,7 +892,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -927,7 +927,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -1088,7 +1088,7 @@ STAGE PLANS: Partition table definition input alias: abc name: noop - order by: _col1 ASC NULLS 
FIRST + order by: _col1 ASC NULLS LAST output shape: _col0: int, _col1: string, _col2: string, _col3: string, _col4: string, _col5: int, _col6: string, _col7: double, _col8: string partition by: _col2 raw input shape: @@ -1259,7 +1259,7 @@ STAGE PLANS: Partition table definition input alias: abc name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col0: int, _col1: string, _col2: string, _col3: string, _col4: string, _col5: int, _col6: string, _col7: double, _col8: string partition by: _col2 raw input shape: @@ -1364,7 +1364,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: p_name ASC NULLS FIRST, p_size DESC NULLS LAST + order by: p_name ASC NULLS LAST, p_size DESC NULLS LAST output shape: p_name: string, p_mfgr: string, p_size: int partition by: p_mfgr raw input shape: @@ -1391,7 +1391,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col1 ASC NULLS FIRST, _col5 DESC NULLS LAST + order by: _col1 ASC NULLS LAST, _col5 DESC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -1418,7 +1418,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST, _col5 DESC NULLS LAST + order by: _col1 ASC NULLS LAST, _col5 DESC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1534,7 +1534,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: p_name ASC NULLS FIRST + order by: p_name ASC NULLS LAST output shape: p_name: string, p_mfgr: string, p_size: int, p_retailprice: double partition by: p_mfgr raw input shape: @@ -1562,7 +1562,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1590,7 +1590,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1736,7 +1736,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1763,7 +1763,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1912,7 +1912,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 DESC NULLS LAST, _col1 ASC NULLS FIRST + order by: _col2 DESC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1926,7 +1926,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 DESC NULLS LAST, _col1 ASC NULLS FIRST + order by: _col2 DESC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1934,7 +1934,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 DESC NULLS LAST, _col1 ASC NULLS FIRST + order by: _col2 DESC NULLS LAST, 
_col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1961,7 +1961,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 DESC NULLS LAST, _col1 ASC NULLS FIRST + order by: _col2 DESC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1969,7 +1969,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 DESC NULLS LAST, _col1 ASC NULLS FIRST + order by: _col2 DESC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1996,7 +1996,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -2152,7 +2152,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -2178,7 +2178,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -2346,7 +2346,7 @@ STAGE PLANS: Partition table definition input alias: abc name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col0: int, _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -2391,7 +2391,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -2552,7 +2552,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -2724,7 +2724,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col0 ASC NULLS FIRST + order by: _col0 ASC NULLS LAST output shape: _col0: string, _col1: string, _col2: double partition by: _col0 raw input shape: @@ -2750,7 +2750,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -2937,7 +2937,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -2989,7 +2989,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -3025,7 +3025,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col3 ASC NULLS FIRST, _col2 ASC NULLS FIRST + order by: _col3 ASC NULLS LAST, _col2 ASC NULLS LAST partition by: _col3 raw input shape: window functions: @@ -3083,7 +3083,7 @@ STAGE PLANS: Partition table definition input alias: 
ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE @@ -3107,7 +3107,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE @@ -3338,14 +3338,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -3359,7 +3359,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -3367,7 +3367,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -3394,7 +3394,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -3402,7 +3402,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -3609,14 +3609,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -3642,7 +3642,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -3668,7 +3668,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -3695,7 +3695,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -3870,14 +3870,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS 
FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -3903,14 +3903,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -3937,7 +3937,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -4113,14 +4113,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4146,7 +4146,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -4160,7 +4160,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4188,7 +4188,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4393,7 +4393,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4407,7 +4407,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -4415,7 +4415,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -4442,7 +4442,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST + order by: 
_col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -4450,7 +4450,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -4477,7 +4477,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST partition by: _col2, _col1 raw input shape: window functions: @@ -4648,14 +4648,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4669,7 +4669,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4697,7 +4697,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4725,7 +4725,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/spark/ptf_matchpath.q.out ql/src/test/results/clientpositive/spark/ptf_matchpath.q.out index dad3a688f1..2151d4a3f3 100644 --- ql/src/test/results/clientpositive/spark/ptf_matchpath.q.out +++ ql/src/test/results/clientpositive/spark/ptf_matchpath.q.out @@ -94,7 +94,7 @@ STAGE PLANS: input alias: ptf_1 arguments: 'LATE.LATE+', 'LATE', (_col5 > 15.0), 'origin_city_name, fl_num, year, month, day_of_month, size(tpath) as sz, tpath[0].day_of_month as tpath' name: matchpath - order by: _col2 ASC NULLS FIRST, _col3 ASC NULLS FIRST, _col4 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col3 ASC NULLS LAST, _col4 ASC NULLS LAST output shape: tpath: int partition by: _col6 raw input shape: @@ -218,7 +218,7 @@ STAGE PLANS: input alias: ptf_1 arguments: 'LATE.LATE+', 'LATE', (_col5 > 15.0), 'origin_city_name, fl_num, year, month, day_of_month, size(tpath) as sz, tpath[0].day_of_month as tpath' name: matchpath - order by: _col6 ASC NULLS FIRST, _col2 ASC NULLS FIRST, _col3 ASC NULLS FIRST, _col4 ASC NULLS FIRST + order by: _col6 ASC NULLS LAST, _col2 ASC NULLS LAST, _col3 ASC NULLS LAST, _col4 ASC NULLS LAST output shape: tpath: int partition by: 0 raw input shape: @@ -340,7 +340,7 @@ STAGE PLANS: input alias: ptf_1 arguments: 'LATE.LATE+', 'LATE', (_col5 > 15.0), 'origin_city_name, fl_num, year, month, day_of_month, size(tpath) as sz, tpath[0].day_of_month as tpath' name: matchpath - order by: _col6 ASC NULLS FIRST, 
_col2 ASC NULLS FIRST, _col3 ASC NULLS FIRST, _col4 ASC NULLS FIRST + order by: _col6 ASC NULLS LAST, _col2 ASC NULLS LAST, _col3 ASC NULLS LAST, _col4 ASC NULLS LAST output shape: tpath: int partition by: 0 raw input shape: diff --git ql/src/test/results/clientpositive/spark/ptf_streaming.q.out ql/src/test/results/clientpositive/spark/ptf_streaming.q.out index 5e7f81581f..a6fd11deb9 100644 --- ql/src/test/results/clientpositive/spark/ptf_streaming.q.out +++ ql/src/test/results/clientpositive/spark/ptf_streaming.q.out @@ -56,7 +56,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopstreaming - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -82,7 +82,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -260,7 +260,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopstreaming - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -286,7 +286,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -426,7 +426,7 @@ STAGE PLANS: Partition table definition input alias: abc name: noopstreaming - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col0: int, _col1: string, _col2: string, _col3: string, _col4: string, _col5: int, _col6: string, _col7: double, _col8: string partition by: _col2 raw input shape: @@ -548,7 +548,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmapstreaming - order by: p_name ASC NULLS FIRST, p_size DESC NULLS LAST + order by: p_name ASC NULLS LAST, p_size DESC NULLS LAST output shape: p_name: string, p_mfgr: string, p_size: int partition by: p_mfgr raw input shape: @@ -575,7 +575,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmapstreaming - order by: _col1 ASC NULLS FIRST, _col5 DESC NULLS LAST + order by: _col1 ASC NULLS LAST, _col5 DESC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -601,7 +601,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST, _col5 DESC NULLS LAST + order by: _col1 ASC NULLS LAST, _col5 DESC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -717,7 +717,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmapstreaming - order by: p_name ASC NULLS FIRST + order by: p_name ASC NULLS LAST output shape: p_name: string, p_mfgr: string, p_size: int, p_retailprice: double partition by: p_mfgr raw input shape: @@ -745,7 +745,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmapstreaming - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -772,7 +772,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window 
functions: @@ -920,7 +920,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -934,7 +934,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmapstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -942,7 +942,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -969,7 +969,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmapstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -977,7 +977,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1003,7 +1003,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1153,7 +1153,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1167,7 +1167,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1175,7 +1175,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1202,7 +1202,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1210,7 +1210,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1236,7 +1236,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - 
order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1386,7 +1386,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1400,7 +1400,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmapstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1408,7 +1408,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1435,7 +1435,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmapstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1443,7 +1443,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1469,7 +1469,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1637,7 +1637,7 @@ STAGE PLANS: Partition table definition input alias: abc name: noopstreaming - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col0: int, _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -1682,7 +1682,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1865,14 +1865,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopstreaming - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -1886,7 +1886,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -1894,7 +1894,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw 
input shape: @@ -1921,7 +1921,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -1929,7 +1929,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -2134,14 +2134,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopstreaming - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -2167,7 +2167,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -2193,7 +2193,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -2219,7 +2219,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -2391,14 +2391,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -2412,7 +2412,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmapstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -2440,7 +2440,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmapstreaming - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -2467,7 +2467,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/spark/sample6.q.out ql/src/test/results/clientpositive/spark/sample6.q.out index 153f0fd4a8..f710ea4120 100644 --- 
ql/src/test/results/clientpositive/spark/sample6.q.out +++ ql/src/test/results/clientpositive/spark/sample6.q.out @@ -488,7 +488,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 5301 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 500 Data size: 5301 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -902,7 +902,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 5301 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 500 Data size: 5301 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -1539,7 +1539,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 5301 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 500 Data size: 5301 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -2058,7 +2058,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 5301 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 500 Data size: 5301 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -2517,7 +2517,7 @@ STAGE PLANS: Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -2902,7 +2902,7 @@ STAGE PLANS: Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -3178,7 +3178,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) - null sort order: aa + null sort order: zz sort order: ++ Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/spark/semijoin.q.out ql/src/test/results/clientpositive/spark/semijoin.q.out index 5455b36c7a..87d8d4081e 100644 --- ql/src/test/results/clientpositive/spark/semijoin.q.out +++ ql/src/test/results/clientpositive/spark/semijoin.q.out @@ -1885,7 +1885,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 Left Semi Join 1 to 2 keys: 0 key (type: int) @@ -2284,7 +2284,7 @@ STAGE PLANS: Join Operator condition map: Left Semi Join 0 to 1 - Outer Join 0 to 2 + Full Outer Join 0 to 2 keys: 0 key (type: int) 1 _col0 (type: int) diff --git ql/src/test/results/clientpositive/spark/skewjoin_union_remove_1.q.out ql/src/test/results/clientpositive/spark/skewjoin_union_remove_1.q.out index 0ac1843dbb..7d4b5c5979 100644 --- ql/src/test/results/clientpositive/spark/skewjoin_union_remove_1.q.out +++ ql/src/test/results/clientpositive/spark/skewjoin_union_remove_1.q.out @@ -324,14 +324,14 @@ POSTHOOK: type: QUERY 
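[Aside, not part of the patch: the hunks around this point pair two spellings of the same change. "null sort order" prints one character per sort key, and these plans flip it from 'a' (nulls sorted first) to 'z' (nulls sorted last) in step with the NULLS FIRST -> NULLS LAST rewrites; likewise "Outer Join" -> "Full Outer Join" only relabels the condition-map entry for the same FULL OUTER JOIN, with identical keys. A minimal HiveQL sketch of the row movement visible in the result hunks below, using a hypothetical toy table t:

    CREATE TABLE t (key INT, val STRING);
    INSERT INTO t VALUES (NULL, 'n'), (4, '14'), (5, '15');

    -- NULLS FIRST ordering (old golden output, "null sort order: a"):
    --   NULL n / 4 14 / 5 15
    -- NULLS LAST ordering (new golden output, "null sort order: z"):
    --   4 14 / 5 15 / NULL n
    SELECT key, val FROM t ORDER BY key ASC NULLS LAST;

The skewjoin result hunks that follow show exactly this movement: the "NULL NULL 4 14" and "NULL NULL 5 15" rows produced by the full outer join shift from the head of the ordered output to its tail.]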
POSTHOOK: Input: default@t1_n57 POSTHOOK: Input: default@t2_n35 #### A masked pattern was here #### -NULL NULL 4 14 -NULL NULL 5 15 2 12 2 22 3 13 3 13 8 18 8 18 8 18 8 18 8 28 8 18 8 28 8 18 +NULL NULL 4 14 +NULL NULL 5 15 PREHOOK: query: create table DEST1_n58(key1 STRING, val1 STRING, key2 STRING, val2 STRING) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default @@ -674,11 +674,11 @@ ORDER BY key1, key2, val1, val2 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1_n58 #### A masked pattern was here #### -NULL NULL 4 14 -NULL NULL 5 15 2 12 2 22 3 13 3 13 8 18 8 18 8 18 8 18 8 28 8 18 8 28 8 18 +NULL NULL 4 14 +NULL NULL 5 15 diff --git ql/src/test/results/clientpositive/spark/skewjoinopt1.q.out ql/src/test/results/clientpositive/spark/skewjoinopt1.q.out index e8ce401bc6..48b77927ca 100644 --- ql/src/test/results/clientpositive/spark/skewjoinopt1.q.out +++ ql/src/test/results/clientpositive/spark/skewjoinopt1.q.out @@ -324,14 +324,14 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n101 POSTHOOK: Input: default@t2_n64 #### A masked pattern was here #### -NULL NULL 4 14 -NULL NULL 5 15 2 12 2 22 3 13 3 13 8 18 8 18 8 18 8 18 8 28 8 18 8 28 8 18 +NULL NULL 4 14 +NULL NULL 5 15 PREHOOK: query: EXPLAIN SELECT count(1) FROM T1_n101 a JOIN T2_n64 b ON a.key = b.key PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/spark/skewjoinopt15.q.out ql/src/test/results/clientpositive/spark/skewjoinopt15.q.out index 14f302e2d0..f85c0b7edc 100644 --- ql/src/test/results/clientpositive/spark/skewjoinopt15.q.out +++ ql/src/test/results/clientpositive/spark/skewjoinopt15.q.out @@ -356,14 +356,14 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n109 POSTHOOK: Input: default@t2_n66 #### A masked pattern was here #### -NULL NULL 4 14 -NULL NULL 5 15 2 12 2 22 3 13 3 13 8 18 8 18 8 18 8 18 8 28 8 18 8 28 8 18 +NULL NULL 4 14 +NULL NULL 5 15 PREHOOK: query: EXPLAIN SELECT count(1) FROM T1_n109 a JOIN T2_n66 b ON a.key = b.key PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/spark/skewjoinopt3.q.out ql/src/test/results/clientpositive/spark/skewjoinopt3.q.out index 6eb23e084f..31bcf23b71 100644 --- ql/src/test/results/clientpositive/spark/skewjoinopt3.q.out +++ ql/src/test/results/clientpositive/spark/skewjoinopt3.q.out @@ -281,7 +281,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 1 Data size: 330 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -295,7 +295,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 outputColumnNames: _col0, _col1, _col2, _col3 Statistics: Num rows: 1 Data size: 330 Basic stats: COMPLETE Column stats: NONE File Output Operator @@ -324,8 +324,6 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@t1_n12 POSTHOOK: Input: default@t2_n7 #### A masked pattern was here #### -NULL NULL 4 14 -NULL NULL 5 15 1 11 NULL NULL 2 12 2 22 3 13 3 13 @@ -334,3 +332,5 @@ NULL NULL 5 15 8 18 8 18 8 28 8 18 8 28 8 18 +NULL NULL 4 14 +NULL NULL 5 15 diff --git ql/src/test/results/clientpositive/spark/smb_mapjoin_1.q.out ql/src/test/results/clientpositive/spark/smb_mapjoin_1.q.out index 183ba44fa6..8175984214 100644 --- ql/src/test/results/clientpositive/spark/smb_mapjoin_1.q.out +++ ql/src/test/results/clientpositive/spark/smb_mapjoin_1.q.out @@ -293,7 +293,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 2060 Basic stats: COMPLETE Column stats: 
NONE Sorted Merge Bucket Map Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) @@ -539,7 +539,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 2080 Basic stats: COMPLETE Column stats: NONE Sorted Merge Bucket Map Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) diff --git ql/src/test/results/clientpositive/spark/smb_mapjoin_13.q.out ql/src/test/results/clientpositive/spark/smb_mapjoin_13.q.out index a8bc465048..70daf4ed3e 100644 --- ql/src/test/results/clientpositive/spark/smb_mapjoin_13.q.out +++ ql/src/test/results/clientpositive/spark/smb_mapjoin_13.q.out @@ -104,7 +104,7 @@ STAGE PLANS: Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -356,7 +356,7 @@ STAGE PLANS: Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/spark/smb_mapjoin_15.q.out ql/src/test/results/clientpositive/spark/smb_mapjoin_15.q.out index 7a82af285c..19f94b54ea 100644 --- ql/src/test/results/clientpositive/spark/smb_mapjoin_15.q.out +++ ql/src/test/results/clientpositive/spark/smb_mapjoin_15.q.out @@ -85,7 +85,7 @@ STAGE PLANS: BucketMapJoin: true Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -315,7 +315,7 @@ STAGE PLANS: BucketMapJoin: true Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -493,7 +493,7 @@ STAGE PLANS: BucketMapJoin: true Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -822,7 +822,7 @@ STAGE PLANS: Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: ###Masked### Data size: ###Masked### Basic stats: COMPLETE Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/spark/smb_mapjoin_2.q.out ql/src/test/results/clientpositive/spark/smb_mapjoin_2.q.out index ea44efc5d6..8ce2eb2a57 100644 --- ql/src/test/results/clientpositive/spark/smb_mapjoin_2.q.out +++ ql/src/test/results/clientpositive/spark/smb_mapjoin_2.q.out @@ -253,7 +253,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 2220 Basic stats: COMPLETE Column stats: NONE Sorted Merge Bucket Map Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) @@ -503,7 +503,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 2080 Basic stats: COMPLETE Column stats: NONE Sorted Merge Bucket Map Join Operator condition map: - Outer 
Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) diff --git ql/src/test/results/clientpositive/spark/smb_mapjoin_3.q.out ql/src/test/results/clientpositive/spark/smb_mapjoin_3.q.out index 11fb39ded9..80f38facff 100644 --- ql/src/test/results/clientpositive/spark/smb_mapjoin_3.q.out +++ ql/src/test/results/clientpositive/spark/smb_mapjoin_3.q.out @@ -252,7 +252,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 2220 Basic stats: COMPLETE Column stats: NONE Sorted Merge Bucket Map Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) @@ -500,7 +500,7 @@ STAGE PLANS: Statistics: Num rows: 1 Data size: 2060 Basic stats: COMPLETE Column stats: NONE Sorted Merge Bucket Map Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) diff --git ql/src/test/results/clientpositive/spark/smb_mapjoin_4.q.out ql/src/test/results/clientpositive/spark/smb_mapjoin_4.q.out index eda1afa4e1..399edaf35d 100644 --- ql/src/test/results/clientpositive/spark/smb_mapjoin_4.q.out +++ ql/src/test/results/clientpositive/spark/smb_mapjoin_4.q.out @@ -375,7 +375,7 @@ STAGE PLANS: Join Operator condition map: Left Outer Join 0 to 1 - Outer Join 1 to 2 + Full Outer Join 1 to 2 keys: 0 _col0 (type: int) 1 _col0 (type: int) @@ -685,7 +685,7 @@ STAGE PLANS: Join Operator condition map: Right Outer Join 0 to 1 - Outer Join 1 to 2 + Full Outer Join 1 to 2 keys: 0 _col0 (type: int) 1 _col0 (type: int) @@ -861,7 +861,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 Left Outer Join 1 to 2 keys: 0 _col0 (type: int) @@ -973,7 +973,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 Right Outer Join 1 to 2 keys: 0 _col0 (type: int) @@ -1082,8 +1082,8 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 - Outer Join 1 to 2 + Full Outer Join 0 to 1 + Full Outer Join 1 to 2 keys: 0 _col0 (type: int) 1 _col0 (type: int) diff --git ql/src/test/results/clientpositive/spark/smb_mapjoin_5.q.out ql/src/test/results/clientpositive/spark/smb_mapjoin_5.q.out index 9abc62793a..1b71cdbf96 100644 --- ql/src/test/results/clientpositive/spark/smb_mapjoin_5.q.out +++ ql/src/test/results/clientpositive/spark/smb_mapjoin_5.q.out @@ -375,7 +375,7 @@ STAGE PLANS: Join Operator condition map: Left Outer Join 0 to 1 - Outer Join 1 to 2 + Full Outer Join 1 to 2 keys: 0 _col0 (type: int) 1 _col0 (type: int) @@ -685,7 +685,7 @@ STAGE PLANS: Join Operator condition map: Right Outer Join 0 to 1 - Outer Join 1 to 2 + Full Outer Join 1 to 2 keys: 0 _col0 (type: int) 1 _col0 (type: int) @@ -861,7 +861,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 Left Outer Join 1 to 2 keys: 0 _col0 (type: int) @@ -973,7 +973,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 Right Outer Join 1 to 2 keys: 0 _col0 (type: int) @@ -1082,8 +1082,8 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 - Outer Join 1 to 2 + Full Outer Join 0 to 1 + Full Outer Join 1 to 2 keys: 0 _col0 (type: int) 1 _col0 (type: int) diff --git ql/src/test/results/clientpositive/spark/smb_mapjoin_7.q.out ql/src/test/results/clientpositive/spark/smb_mapjoin_7.q.out index 610abab91b..1732927ee7 100644 --- 
ql/src/test/results/clientpositive/spark/smb_mapjoin_7.q.out +++ ql/src/test/results/clientpositive/spark/smb_mapjoin_7.q.out @@ -631,7 +631,7 @@ STAGE PLANS: Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Sorted Merge Bucket Map Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 key (type: int) 1 key (type: int) diff --git ql/src/test/results/clientpositive/spark/spark_explainuser_1.q.out ql/src/test/results/clientpositive/spark/spark_explainuser_1.q.out index 1a8ee5bdbe..1a8583dbbe 100644 --- ql/src/test/results/clientpositive/spark/spark_explainuser_1.q.out +++ ql/src/test/results/clientpositive/spark/spark_explainuser_1.q.out @@ -2128,7 +2128,7 @@ Stage-0 Filter Operator [FIL_23] (rows=26 width=491) predicate:first_value_window_0 is not null PTF Operator [PTF_10] (rows=26 width=491) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col5 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col5 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_9] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Map 4 [PARTITION-LEVEL SORT] @@ -2558,7 +2558,7 @@ Stage-0 Select Operator [SEL_4] (rows=20 width=64) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10"] PTF Operator [PTF_3] (rows=20 width=621) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col0"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col0"}] Select Operator [SEL_2] (rows=20 width=621) Output:["_col0","_col1","_col2","_col3"] <-Map 1 [PARTITION-LEVEL SORT] @@ -2585,7 +2585,7 @@ Stage-0 Select Operator [SEL_4] (rows=25 width=179) Output:["_col0","_col1","_col2"] PTF Operator [PTF_3] (rows=25 width=443) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col0"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col0"}] Select Operator [SEL_2] (rows=25 width=443) Output:["_col0","_col1"] <-Map 1 [PARTITION-LEVEL SORT] @@ -4057,14 +4057,14 @@ Stage-0 Select Operator [SEL_7] (rows=26 width=239) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] PTF Operator [PTF_6] (rows=26 width=499) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_5] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Reducer 2 [PARTITION-LEVEL SORT] PARTITION-LEVEL SORT [RS_4] PartitionCols:_col2 PTF Operator [PTF_3] (rows=26 width=499) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_2] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Map 1 [PARTITION-LEVEL SORT] @@ -4102,14 +4102,14 @@ Stage-0 Select Operator [SEL_14] (rows=27 width=227) Output:["_col0","_col1","_col2","_col3"] PTF Operator [PTF_13] (rows=27 width=223) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS 
FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_12] (rows=27 width=223) Output:["_col1","_col2","_col5"] <-Reducer 2 [PARTITION-LEVEL SORT] PARTITION-LEVEL SORT [RS_11] PartitionCols:_col2 PTF Operator [PTF_10] (rows=27 width=223) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_9] (rows=27 width=223) Output:["_col1","_col2","_col5"] <-Map 1 [PARTITION-LEVEL SORT] @@ -4167,14 +4167,14 @@ Stage-0 Select Operator [SEL_7] (rows=26 width=239) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] PTF Operator [PTF_6] (rows=26 width=499) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_5] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Reducer 2 [PARTITION-LEVEL SORT] PARTITION-LEVEL SORT [RS_4] PartitionCols:_col2 PTF Operator [PTF_3] (rows=26 width=499) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_2] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Map 1 [PARTITION-LEVEL SORT] @@ -4218,14 +4218,14 @@ Stage-0 Select Operator [SEL_7] (rows=26 width=239) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"] PTF Operator [PTF_6] (rows=26 width=491) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_5] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Reducer 2 [PARTITION-LEVEL SORT] PARTITION-LEVEL SORT [RS_4] PartitionCols:_col2 PTF Operator [PTF_3] (rows=26 width=491) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_2] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Map 1 [PARTITION-LEVEL SORT] @@ -4272,7 +4272,7 @@ Stage-0 Select Operator [SEL_12] (rows=26 width=239) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6"] PTF Operator [PTF_11] (rows=26 width=223) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col0"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col0"}] Group By Operator [GBY_8] (rows=26 width=223) Output:["_col0","_col1","_col2"],keys:KEY._col0, KEY._col1, KEY._col2 <-Reducer 3 [GROUP PARTITION-LEVEL SORT] @@ -4286,7 +4286,7 @@ Stage-0 Select Operator [SEL_4] (rows=26 width=491) Output:["_col1","_col2","_col5"] PTF Operator [PTF_3] (rows=26 width=491) - Function definitions:[{},{"Partition table 
definition":{"name:":"noop","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_2] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Map 1 [PARTITION-LEVEL SORT] @@ -4331,7 +4331,7 @@ Stage-0 <-Filter Operator [FIL_12] (rows=26 width=887) predicate:_col0 is not null PTF Operator [PTF_4] (rows=26 width=887) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_3] (rows=26 width=887) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8"] <- Please refer to the previous Map 1 [PARTITION-LEVEL SORT] @@ -4374,21 +4374,21 @@ Stage-0 Select Operator [SEL_8] (rows=26 width=227) Output:["_col0","_col1","_col2","_col3"] PTF Operator [PTF_7] (rows=26 width=491) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST, _col5 DESC NULLS LAST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST, _col5 DESC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_6] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Reducer 2 [PARTITION-LEVEL SORT] PARTITION-LEVEL SORT [RS_5] PartitionCols:_col2 PTF Operator [PTF_4] (rows=26 width=491) - Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col1 ASC NULLS FIRST, _col5 DESC NULLS LAST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col1 ASC NULLS LAST, _col5 DESC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_3] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Map 1 [PARTITION-LEVEL SORT] PARTITION-LEVEL SORT [RS_2] PartitionCols:p_mfgr PTF Operator [PTF_1] (rows=26 width=223) - Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"p_name ASC NULLS FIRST, p_size DESC NULLS LAST","partition by:":"p_mfgr"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"p_name ASC NULLS LAST, p_size DESC NULLS LAST","partition by:":"p_mfgr"}}] TableScan [TS_0] (rows=26 width=223) default@part,part,Tbl:COMPLETE,Col:COMPLETE,Output:["p_name","p_mfgr","p_size"] @@ -4425,21 +4425,21 @@ Stage-0 Select Operator [SEL_8] (rows=26 width=239) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] PTF Operator [PTF_7] (rows=26 width=499) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_6] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Reducer 2 [PARTITION-LEVEL SORT] PARTITION-LEVEL SORT [RS_5] PartitionCols:_col2 PTF Operator [PTF_4] (rows=26 width=499) - Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_3] (rows=26 width=499) 
Output:["_col1","_col2","_col5","_col7"] <-Map 1 [PARTITION-LEVEL SORT] PARTITION-LEVEL SORT [RS_2] PartitionCols:p_mfgr PTF Operator [PTF_1] (rows=26 width=231) - Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"p_name ASC NULLS FIRST","partition by:":"p_mfgr"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"p_name ASC NULLS LAST","partition by:":"p_mfgr"}}] TableScan [TS_0] (rows=26 width=231) default@part,part,Tbl:COMPLETE,Col:COMPLETE,Output:["p_name","p_mfgr","p_size","p_retailprice"] @@ -4476,14 +4476,14 @@ Stage-0 Select Operator [SEL_7] (rows=26 width=239) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] PTF Operator [PTF_6] (rows=26 width=499) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_5] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Reducer 2 [PARTITION-LEVEL SORT] PARTITION-LEVEL SORT [RS_4] PartitionCols:_col2 PTF Operator [PTF_3] (rows=26 width=499) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_2] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Map 1 [PARTITION-LEVEL SORT] @@ -4528,23 +4528,23 @@ Stage-0 Select Operator [SEL_11] (rows=26 width=239) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] PTF Operator [PTF_10] (rows=26 width=499) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_9] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Reducer 3 [PARTITION-LEVEL SORT] PARTITION-LEVEL SORT [RS_8] PartitionCols:_col2 PTF Operator [PTF_7] (rows=26 width=499) - Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col2 DESC NULLS LAST, _col1 ASC NULLS FIRST","partition by:":"_col2"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 DESC NULLS LAST, _col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col2 DESC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 DESC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_6] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Reducer 2 [PARTITION-LEVEL SORT] PARTITION-LEVEL SORT [RS_5] PartitionCols:_col2 PTF Operator [PTF_4] (rows=26 width=499) - Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col2 DESC NULLS LAST, _col1 ASC NULLS FIRST","partition by:":"_col2"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 DESC NULLS LAST, _col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col2 DESC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2"}},{"Partition table definition":{"name:":"noop","order 
by:":"_col2 DESC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2"}}] PTF Operator [PTF_3] (rows=26 width=499) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 DESC NULLS LAST, _col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 DESC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_2] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Map 1 [PARTITION-LEVEL SORT] @@ -4594,14 +4594,14 @@ Stage-0 Select Operator [SEL_7] (rows=26 width=235) Output:["_col0","_col1","_col2","_col3"] PTF Operator [PTF_6] (rows=26 width=499) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_5] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Reducer 2 [PARTITION-LEVEL SORT] PARTITION-LEVEL SORT [RS_4] PartitionCols:_col2 PTF Operator [PTF_3] (rows=26 width=499) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_2] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Map 1 [PARTITION-LEVEL SORT] @@ -4649,7 +4649,7 @@ Stage-0 Select Operator [SEL_13] (rows=27 width=259) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8"] PTF Operator [PTF_12] (rows=27 width=767) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_11] (rows=27 width=767) Output:["_col1","_col2","_col5","_col7"] <-Reducer 2 [PARTITION-LEVEL SORT] @@ -4666,7 +4666,7 @@ Stage-0 <-Filter Operator [FIL_16] (rows=26 width=503) predicate:_col0 is not null PTF Operator [PTF_4] (rows=26 width=503) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_3] (rows=26 width=503) Output:["_col0","_col1","_col2","_col5","_col7"] <- Please refer to the previous Map 1 [PARTITION-LEVEL SORT] @@ -4798,14 +4798,14 @@ Stage-3 Select Operator [SEL_7] (rows=26 width=239) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] PTF Operator [PTF_6] (rows=26 width=499) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_5] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Reducer 6 [PARTITION-LEVEL SORT] PARTITION-LEVEL SORT [RS_24] PartitionCols:_col2 PTF Operator [PTF_22] (rows=26 width=499) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS 
LAST","partition by:":"_col2"}}] Select Operator [SEL_21] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <-Map 1 [PARTITION-LEVEL SORT] @@ -4819,7 +4819,7 @@ Stage-3 Select Operator [SEL_17] (rows=26 width=247) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7"] PTF Operator [PTF_16] (rows=26 width=499) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col3 ASC NULLS FIRST, _col2 ASC NULLS FIRST","partition by:":"_col3"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col3 ASC NULLS LAST, _col2 ASC NULLS LAST","partition by:":"_col3"}] Select Operator [SEL_15] (rows=26 width=499) Output:["_col0","_col2","_col3","_col6"] <-Reducer 4 [PARTITION-LEVEL SORT] @@ -4828,14 +4828,14 @@ Stage-3 Select Operator [SEL_13] (rows=26 width=491) Output:["sum_window_0","_col1","_col2","_col5"] PTF Operator [PTF_12] (rows=26 width=491) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col5 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col5 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_11] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Reducer 7 [PARTITION-LEVEL SORT] PARTITION-LEVEL SORT [RS_27] PartitionCols:_col2 PTF Operator [PTF_26] (rows=26 width=499) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_25] (rows=26 width=499) Output:["_col1","_col2","_col5","_col7"] <- Please refer to the previous Map 1 [PARTITION-LEVEL SORT] @@ -4903,16 +4903,16 @@ Stage-0 PARTITION-LEVEL SORT [RS_8] PartitionCols:_col2, _col1 PTF Operator [PTF_7] (rows=26 width=491) - Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST","partition by:":"_col2, _col1"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST","partition by:":"_col2, _col1"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col2 ASC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2, _col1"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2, _col1"}}] Select Operator [SEL_6] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Reducer 2 [PARTITION-LEVEL SORT] PARTITION-LEVEL SORT [RS_5] PartitionCols:_col2, _col1 PTF Operator [PTF_4] (rows=26 width=491) - Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST","partition by:":"_col2, _col1"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST","partition by:":"_col2, _col1"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noopwithmap","order by:":"_col2 ASC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2, _col1"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2, _col1"}}] PTF Operator [PTF_3] (rows=26 width=491) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST","partition by:":"_col2"}},{"Partition table 
definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST","partition by:":"_col2"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_2] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Map 1 [PARTITION-LEVEL SORT] @@ -4972,28 +4972,28 @@ Stage-0 Select Operator [SEL_13] (rows=26 width=239) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] PTF Operator [PTF_12] (rows=26 width=491) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_11] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Reducer 4 [PARTITION-LEVEL SORT] PARTITION-LEVEL SORT [RS_10] PartitionCols:_col2 PTF Operator [PTF_9] (rows=26 width=491) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_8] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Reducer 3 [PARTITION-LEVEL SORT] PARTITION-LEVEL SORT [RS_7] PartitionCols:_col2 PTF Operator [PTF_6] (rows=26 width=491) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST","partition by:":"_col2, _col1"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2, _col1"}}] Select Operator [SEL_5] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Reducer 2 [PARTITION-LEVEL SORT] PARTITION-LEVEL SORT [RS_4] PartitionCols:_col2, _col1 PTF Operator [PTF_3] (rows=26 width=491) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST","partition by:":"_col2"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST","partition by:":"_col2"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST","partition by:":"_col2"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_2] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Map 1 [PARTITION-LEVEL SORT] @@ -5048,21 +5048,21 @@ Stage-0 Select Operator [SEL_10] (rows=26 width=239) Output:["_col0","_col1","_col2","_col3","_col4","_col5"] PTF Operator [PTF_9] (rows=26 width=491) - Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS FIRST","partition by:":"_col2"}] + Function definitions:[{},{"name:":"windowingtablefunction","order by:":"_col1 ASC NULLS LAST","partition by:":"_col2"}] Select Operator [SEL_8] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Reducer 3 [PARTITION-LEVEL SORT] PARTITION-LEVEL SORT [RS_7] PartitionCols:_col2 PTF Operator [PTF_6] (rows=26 width=491) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST","partition by:":"_col2"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST","partition by:":"_col2"}}] + Function 
definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST","partition by:":"_col2"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST","partition by:":"_col2"}}] Select Operator [SEL_5] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Reducer 2 [PARTITION-LEVEL SORT] PARTITION-LEVEL SORT [RS_4] PartitionCols:_col2 PTF Operator [PTF_3] (rows=26 width=491) - Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST","partition by:":"_col2, _col1"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST","partition by:":"_col2, _col1"}}] + Function definitions:[{},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2, _col1"}},{"Partition table definition":{"name:":"noop","order by:":"_col2 ASC NULLS LAST, _col1 ASC NULLS LAST","partition by:":"_col2, _col1"}}] Select Operator [SEL_2] (rows=26 width=491) Output:["_col1","_col2","_col5"] <-Map 1 [PARTITION-LEVEL SORT] diff --git ql/src/test/results/clientpositive/spark/spark_vectorized_dynamic_partition_pruning.q.out ql/src/test/results/clientpositive/spark/spark_vectorized_dynamic_partition_pruning.q.out index 352e74f48b..5954629fc4 100644 --- ql/src/test/results/clientpositive/spark/spark_vectorized_dynamic_partition_pruning.q.out +++ ql/src/test/results/clientpositive/spark/spark_vectorized_dynamic_partition_pruning.q.out @@ -89,10 +89,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -372,10 +371,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [2] + keyColumns: 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -424,10 +422,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -575,10 +572,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: 
VectorReduceSinkStringOperator - keyColumnNums: [2] + keyColumns: 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -627,10 +623,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -856,11 +851,10 @@ STAGE PLANS: Map-reduce partition columns: day(CAST( _col0 AS DATE)) (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [6] + keyColumns: 6:int keyExpressions: VectorUDFDayOfMonthDate(col 5, field DAY_OF_MONTH)(children: CastStringToDate(col 2:string) -> 5:date) -> 6:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -909,11 +903,10 @@ STAGE PLANS: Map-reduce partition columns: day(CAST( _col0 AS DATE)) (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [4] + keyColumns: 4:int keyExpressions: VectorUDFDayOfMonthDate(col 3, field DAY_OF_MONTH)(children: CastStringToDate(col 0:string) -> 3:date) -> 4:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -1060,11 +1053,10 @@ STAGE PLANS: Map-reduce partition columns: day(CAST( _col0 AS DATE)) (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [6] + keyColumns: 6:int keyExpressions: VectorUDFDayOfMonthDate(col 5, field DAY_OF_MONTH)(children: CastStringToDate(col 2:string) -> 5:date) -> 6:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -1113,11 +1105,10 @@ STAGE PLANS: Map-reduce partition columns: day(CAST( _col0 AS DATE)) (type: int) Reduce Sink Vectorization: className: 
VectorReduceSinkLongOperator - keyColumnNums: [4] + keyColumns: 4:int keyExpressions: VectorUDFDayOfMonthDate(col 3, field DAY_OF_MONTH)(children: CastStringToDate(col 0:string) -> 3:date) -> 4:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -1342,11 +1333,10 @@ STAGE PLANS: Map-reduce partition columns: abs(((- UDFToLong(concat(CAST( day(CAST( _col0 AS DATE)) AS STRING), '0'))) + 10)) (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [6] + keyColumns: 6:bigint keyExpressions: FuncAbsLongToLong(col 5:bigint)(children: LongColAddLongScalar(col 6:bigint, val 10)(children: LongColUnaryMinus(col 5:bigint)(children: CastStringToLong(col 8:string)(children: StringGroupColConcatStringScalar(col 7:string, val 0)(children: CastLongToString(col 6:int)(children: VectorUDFDayOfMonthDate(col 5, field DAY_OF_MONTH)(children: CastStringToDate(col 2:string) -> 5:date) -> 6:int) -> 7:string) -> 8:string) -> 5:bigint) -> 6:bigint) -> 5:bigint) -> 6:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -1395,11 +1385,10 @@ STAGE PLANS: Map-reduce partition columns: abs(((- UDFToLong(concat(CAST( day(CAST( _col0 AS DATE)) AS STRING), '0'))) + 10)) (type: bigint) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [4] + keyColumns: 4:bigint keyExpressions: FuncAbsLongToLong(col 3:bigint)(children: LongColAddLongScalar(col 4:bigint, val 10)(children: LongColUnaryMinus(col 3:bigint)(children: CastStringToLong(col 6:string)(children: StringGroupColConcatStringScalar(col 5:string, val 0)(children: CastLongToString(col 4:int)(children: VectorUDFDayOfMonthDate(col 3, field DAY_OF_MONTH)(children: CastStringToDate(col 0:string) -> 3:date) -> 4:int) -> 5:string) -> 6:string) -> 3:bigint) -> 4:bigint) -> 3:bigint) -> 4:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -1616,11 +1605,10 @@ STAGE PLANS: Map-reduce partition columns: CAST( UDFToShort(day(CAST( _col0 AS DATE))) AS decimal(10,0)) (type: decimal(10,0)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [7] + keyColumns: 7:decimal(10,0) keyExpressions: CastLongToDecimal(col 6:smallint)(children: VectorUDFDayOfMonthDate(col 5, field DAY_OF_MONTH)(children: CastStringToDate(col 2:string) -> 5:date) -> 6:int) -> 7:decimal(10,0) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine 
spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -1669,11 +1657,10 @@ STAGE PLANS: Map-reduce partition columns: CAST( day(CAST( _col0 AS DATE)) AS decimal(10,0)) (type: decimal(10,0)) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [5] + keyColumns: 5:decimal(10,0) keyExpressions: CastLongToDecimal(col 4:int)(children: VectorUDFDayOfMonthDate(col 3, field DAY_OF_MONTH)(children: CastStringToDate(col 0:string) -> 3:date) -> 4:int) -> 5:decimal(10,0) native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -1955,10 +1942,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [2] + keyColumns: 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:string Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Execution mode: vectorized @@ -2008,10 +1995,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -2059,10 +2045,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -2237,10 +2222,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [2] + keyColumns: 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - 
valueColumnNums: [3] + valueColumns: 3:string Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Execution mode: vectorized @@ -2290,10 +2275,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -2341,10 +2325,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -2616,10 +2599,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [2, 3] + keyColumns: 2:string, 3:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -2668,10 +2650,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col2 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 2] + keyColumns: 0:string, 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -2819,10 +2800,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [2, 3] + keyColumns: 2:string, 3:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -2871,10 +2851,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col2 (type: string) Reduce Sink Vectorization: className: 
VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 2] + keyColumns: 0:string, 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -3100,10 +3079,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [2] + keyColumns: 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -3152,10 +3130,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -3303,10 +3280,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [2] + keyColumns: 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -3355,10 +3331,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -3585,11 +3560,10 @@ STAGE PLANS: Map-reduce partition columns: UDFToDouble(_col0) (type: double) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [5] + keyColumns: 5:double keyExpressions: CastStringToDouble(col 3:string) -> 5:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] 
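[Aside, not part of the patch: the vectorization hunks in this file rename the explain annotations from bare column indices to name:type pairs -- "keyColumnNums: [0]" becomes "keyColumns: 0:string", "valueColumnNums: [3]" becomes "valueColumns: 3:string" -- and an empty "valueColumnNums: []" line is now simply omitted. A sketch of how to surface these fields, assuming the usual srcpart test fixture with its ds partition column:

    SET hive.vectorized.execution.enabled=true;

    EXPLAIN VECTORIZATION DETAIL
    SELECT ds, count(*) FROM srcpart GROUP BY ds;

In the Reduce Sink Vectorization block of the output, expect a line like "keyColumns: 0:string", with no valueColumns line when no value columns are shipped.]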
Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -3638,11 +3612,10 @@ STAGE PLANS: Map-reduce partition columns: UDFToDouble(UDFToInteger((_col0 / 2.0D))) (type: double) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [3] + keyColumns: 3:double keyExpressions: CastLongToDouble(col 4:int)(children: CastDoubleToLong(col 3:double)(children: DoubleColDivideDoubleScalar(col 0:double, val 2.0) -> 3:double) -> 4:int) -> 3:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -3859,11 +3832,10 @@ STAGE PLANS: Map-reduce partition columns: (UDFToDouble(_col0) * 2.0D) (type: double) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [6] + keyColumns: 6:double keyExpressions: DoubleColMultiplyDoubleScalar(col 5:double, val 2.0)(children: CastStringToDouble(col 3:string) -> 5:double) -> 6:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -3912,10 +3884,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: double) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -4063,11 +4034,10 @@ STAGE PLANS: Map-reduce partition columns: UDFToDouble(_col0) (type: double) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [5] + keyColumns: 5:double keyExpressions: CastStringToDouble(col 3:string) -> 5:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -4116,11 +4086,10 @@ STAGE PLANS: Map-reduce partition columns: UDFToDouble(UDFToInteger((_col0 / 2.0D))) (type: double) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [3] + keyColumns: 3:double keyExpressions: CastLongToDouble(col 4:int)(children: CastDoubleToLong(col 3:double)(children: DoubleColDivideDoubleScalar(col 0:double, val 2.0) -> 3:double) -> 4:int) -> 3:double native: true nativeConditionsMet: 
hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -4268,11 +4237,10 @@ STAGE PLANS: Map-reduce partition columns: (UDFToDouble(_col0) * 2.0D) (type: double) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [6] + keyColumns: 6:double keyExpressions: DoubleColMultiplyDoubleScalar(col 5:double, val 2.0)(children: CastStringToDouble(col 3:string) -> 5:double) -> 6:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -4321,10 +4289,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: double) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0] + keyColumns: 0:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -4551,11 +4518,10 @@ STAGE PLANS: Map-reduce partition columns: CAST( (UDFToDouble(_col0) * 2.0D) AS STRING) (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [7] + keyColumns: 7:string keyExpressions: CastDoubleToString(col 6:double)(children: DoubleColMultiplyDoubleScalar(col 5:double, val 2.0)(children: CastStringToDouble(col 3:string) -> 5:double) -> 6:double) -> 7:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -4604,11 +4570,10 @@ STAGE PLANS: Map-reduce partition columns: CAST( _col0 AS STRING) (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [4] + keyColumns: 4:string keyExpressions: CastDoubleToString(col 0:double) -> 4:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -4763,10 +4728,8 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS 
true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -4818,10 +4781,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: boolean) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:boolean native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -4939,10 +4901,8 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE Stage: Stage-0 @@ -5015,10 +4975,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2, 3] + valueColumns: 2:string, 3:string Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: string) Execution mode: vectorized @@ -5066,10 +5025,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 2] + valueColumns: 0:string, 2:string Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col2 (type: string) Execution mode: vectorized @@ -5316,10 +5274,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [2, 3] + keyColumns: 2:string, 3:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -5368,10 +5325,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col2 (type: string) Reduce Sink 
Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 2] + keyColumns: 0:string, 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -5588,10 +5544,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [2] + keyColumns: 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -5640,10 +5595,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -5848,10 +5802,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -5891,10 +5844,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [2] + keyColumns: 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -6092,10 +6044,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [2] + keyColumns: 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: 
COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -6144,10 +6095,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -6410,10 +6360,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [2] + keyColumns: 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3] + valueColumns: 3:string Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Execution mode: vectorized @@ -6463,10 +6413,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -6514,10 +6463,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -6695,10 +6643,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -6746,10 +6693,10 @@ STAGE PLANS: Map-reduce partition columns: _col1 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [3] + keyColumns: 3:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, 
No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [2] + valueColumns: 2:string Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE value expressions: _col0 (type: string) Execution mode: vectorized @@ -6796,10 +6743,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -6973,10 +6919,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:string Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: vectorized @@ -7028,10 +6973,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:string Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: vectorized @@ -7224,10 +7168,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [2] + keyColumns: 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -7278,10 +7221,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:string Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: vectorized @@ -7333,10 +7275,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: 
hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:string Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: vectorized @@ -7470,10 +7411,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reducer 7 Execution mode: vectorized @@ -7528,10 +7468,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Stage: Stage-0 @@ -7610,10 +7549,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:string Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: vectorized @@ -7665,10 +7603,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:string Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: vectorized @@ -7861,10 +7798,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [2] + keyColumns: 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -7915,10 +7851,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: 
VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:string Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: vectorized @@ -7970,10 +7905,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:string Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: vectorized @@ -8109,10 +8043,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reducer 7 Execution mode: vectorized @@ -8167,10 +8100,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Stage: Stage-0 @@ -8250,10 +8182,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:string Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: vectorized @@ -8305,10 +8236,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:string Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: vectorized @@ -8505,10 +8435,9 @@ STAGE PLANS: 
Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -8559,10 +8488,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:string Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: vectorized @@ -8614,10 +8542,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:string Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: vectorized @@ -8671,10 +8598,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Vectorization: @@ -8751,10 +8677,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reducer 9 Execution mode: vectorized @@ -8809,10 +8734,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE 
Column stats: NONE Stage: Stage-0 @@ -8962,10 +8886,13 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) Map Join Vectorization: - bigTableKeyColumnNums: [2] + bigTableKeyColumns: 2:string + bigTableRetainColumnNums: [] className: VectorMapJoinInnerBigOnlyStringOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nonOuterSmallTableKeyMapping: [] + hashTableImplementationType: OPTIMIZED input vertices: 1 Map 3 Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE @@ -8985,10 +8912,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized @@ -9204,11 +9130,14 @@ STAGE PLANS: 0 day(CAST( _col0 AS DATE)) (type: int) 1 day(CAST( _col0 AS DATE)) (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [6] + bigTableKeyColumns: 6:int bigTableKeyExpressions: VectorUDFDayOfMonthDate(col 5, field DAY_OF_MONTH)(children: CastStringToDate(col 2:string) -> 5:date) -> 6:int + bigTableRetainColumnNums: [] className: VectorMapJoinInnerBigOnlyLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nonOuterSmallTableKeyMapping: [] + hashTableImplementationType: OPTIMIZED input vertices: 1 Map 3 Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE @@ -9228,10 +9157,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized @@ -9511,13 +9439,15 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) Map Join Vectorization: - bigTableKeyColumnNums: [2] - bigTableRetainedColumnNums: [3] - bigTableValueColumnNums: [3] + bigTableKeyColumns: 2:string + bigTableRetainColumnNums: [3] + bigTableValueColumns: 3:string className: VectorMapJoinInnerBigOnlyStringOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key 
Types IS true - projectedOutputColumnNums: [3] + nonOuterSmallTableKeyMapping: [] + projectedOutput: 3:string + hashTableImplementationType: OPTIMIZED outputColumnNames: _col1 input vertices: 1 Map 3 @@ -9529,10 +9459,13 @@ STAGE PLANS: 0 _col1 (type: string) 1 _col0 (type: string) Map Join Vectorization: - bigTableKeyColumnNums: [3] + bigTableKeyColumns: 3:string + bigTableRetainColumnNums: [] className: VectorMapJoinInnerBigOnlyStringOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nonOuterSmallTableKeyMapping: [] + hashTableImplementationType: OPTIMIZED input vertices: 1 Map 4 Statistics: Num rows: 2420 Data size: 25709 Basic stats: COMPLETE Column stats: NONE @@ -9552,10 +9485,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized @@ -9797,10 +9729,13 @@ STAGE PLANS: 0 _col0 (type: string), _col1 (type: string) 1 _col0 (type: string), _col2 (type: string) Map Join Vectorization: - bigTableKeyColumnNums: [2, 3] + bigTableKeyColumns: 2:string, 3:string + bigTableRetainColumnNums: [] className: VectorMapJoinInnerBigOnlyMultiKeyOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nonOuterSmallTableKeyMapping: [] + hashTableImplementationType: OPTIMIZED input vertices: 1 Map 3 Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE @@ -9820,10 +9755,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized @@ -10039,10 +9973,13 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) Map Join Vectorization: - bigTableKeyColumnNums: [2] + bigTableKeyColumns: 2:string + bigTableRetainColumnNums: [] className: VectorMapJoinInnerBigOnlyStringOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + 
nonOuterSmallTableKeyMapping: [] + hashTableImplementationType: OPTIMIZED input vertices: 1 Map 3 Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE @@ -10062,10 +9999,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized @@ -10273,11 +10209,14 @@ STAGE PLANS: 0 UDFToDouble(_col0) (type: double) 1 UDFToDouble(UDFToInteger((_col0 / 2.0D))) (type: double) Map Join Vectorization: - bigTableKeyColumnNums: [5] + bigTableKeyColumns: 5:double bigTableKeyExpressions: CastStringToDouble(col 3:string) -> 5:double + bigTableRetainColumnNums: [] className: VectorMapJoinInnerBigOnlyMultiKeyOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nonOuterSmallTableKeyMapping: [] + hashTableImplementationType: OPTIMIZED input vertices: 1 Map 3 Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE @@ -10297,10 +10236,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized @@ -10507,11 +10445,14 @@ STAGE PLANS: 0 (UDFToDouble(_col0) * 2.0D) (type: double) 1 _col0 (type: double) Map Join Vectorization: - bigTableKeyColumnNums: [6] + bigTableKeyColumns: 6:double bigTableKeyExpressions: DoubleColMultiplyDoubleScalar(col 5:double, val 2.0)(children: CastStringToDouble(col 3:string) -> 5:double) -> 6:double + bigTableRetainColumnNums: [] className: VectorMapJoinInnerBigOnlyMultiKeyOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nonOuterSmallTableKeyMapping: [] + hashTableImplementationType: OPTIMIZED input vertices: 1 Map 3 Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE @@ -10531,10 +10472,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS 
true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized @@ -10683,10 +10623,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: boolean) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:boolean native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -10777,9 +10716,12 @@ STAGE PLANS: 0 1 Map Join Vectorization: + bigTableRetainColumnNums: [] className: VectorMapJoinInnerBigOnlyMultiKeyOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nonOuterSmallTableKeyMapping: [] + hashTableImplementationType: OPTIMIZED input vertices: 1 Reducer 4 Statistics: Num rows: 500000 Data size: 11124000 Basic stats: COMPLETE Column stats: NONE @@ -10799,10 +10741,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized @@ -11013,10 +10954,13 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) Map Join Vectorization: - bigTableKeyColumnNums: [2] + bigTableKeyColumns: 2:string + bigTableRetainColumnNums: [] className: VectorMapJoinInnerBigOnlyStringOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nonOuterSmallTableKeyMapping: [] + hashTableImplementationType: OPTIMIZED input vertices: 1 Map 3 Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE @@ -11036,10 +10980,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized @@ -11204,10 +11147,12 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: 
string) Map Join Vectorization: - bigTableKeyColumnNums: [0] + bigTableKeyColumns: 0:string + bigTableRetainColumnNums: [] className: VectorMapJoinOuterStringOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED input vertices: 1 Map 3 Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE @@ -11227,10 +11172,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized @@ -11394,10 +11338,12 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) Map Join Vectorization: - bigTableKeyColumnNums: [0] + bigTableKeyColumns: 0:string + bigTableRetainColumnNums: [] className: VectorMapJoinOuterStringOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED input vertices: 0 Map 1 Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE @@ -11417,10 +11363,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized @@ -11680,13 +11625,15 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) Map Join Vectorization: - bigTableKeyColumnNums: [2] - bigTableRetainedColumnNums: [3] - bigTableValueColumnNums: [3] + bigTableKeyColumns: 2:string + bigTableRetainColumnNums: [3] + bigTableValueColumns: 3:string className: VectorMapJoinInnerBigOnlyStringOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [3] + nonOuterSmallTableKeyMapping: [] + projectedOutput: 3:string + hashTableImplementationType: OPTIMIZED outputColumnNames: _col1 input vertices: 1 Map 3 @@ -11698,10 +11645,13 @@ STAGE PLANS: 0 _col1 (type: string) 1 _col0 (type: string) Map 
Join Vectorization: - bigTableKeyColumnNums: [3] + bigTableKeyColumns: 3:string + bigTableRetainColumnNums: [] className: VectorMapJoinInnerBigOnlyStringOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nonOuterSmallTableKeyMapping: [] + hashTableImplementationType: OPTIMIZED input vertices: 1 Map 4 Statistics: Num rows: 1210 Data size: 12854 Basic stats: COMPLETE Column stats: NONE @@ -11721,10 +11671,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized @@ -11964,12 +11913,15 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col1 (type: string) Map Join Vectorization: - bigTableKeyColumnNums: [0] + bigTableKeyColumns: 0:string + bigTableRetainColumnNums: [] className: VectorMapJoinInnerStringOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [4] - smallTableMapping: [4] + nonOuterSmallTableKeyMapping: [] + projectedOutput: 4:string + smallTableValueMapping: 4:string + hashTableImplementationType: OPTIMIZED outputColumnNames: _col1 input vertices: 1 Map 3 @@ -11981,10 +11933,13 @@ STAGE PLANS: 0 _col1 (type: string) 1 _col0 (type: string) Map Join Vectorization: - bigTableKeyColumnNums: [4] + bigTableKeyColumns: 4:string + bigTableRetainColumnNums: [] className: VectorMapJoinInnerBigOnlyStringOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nonOuterSmallTableKeyMapping: [] + hashTableImplementationType: OPTIMIZED input vertices: 1 Map 4 Statistics: Num rows: 1 Data size: 207 Basic stats: PARTIAL Column stats: NONE @@ -12004,10 +11959,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: PARTIAL Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized @@ -12141,10 +12095,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - 
keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:string Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: vectorized @@ -12196,10 +12149,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:string Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: vectorized @@ -12392,10 +12344,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [2] + keyColumns: 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -12446,10 +12397,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:string Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: vectorized @@ -12501,10 +12451,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:string Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: vectorized @@ -12640,10 +12589,9 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [0] + keyColumns: 0:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE Reducer 7 Execution mode: vectorized @@ -12698,10 +12646,9 @@ 
STAGE PLANS:
   Map-reduce partition columns: _col0 (type: string)
   Reduce Sink Vectorization:
       className: VectorReduceSinkStringOperator
-      keyColumnNums: [0]
+      keyColumns: 0:string
       native: true
       nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
-      valueColumnNums: []
   Statistics: Num rows: 2 Data size: 368 Basic stats: COMPLETE Column stats: NONE
   Stage: Stage-0
diff --git ql/src/test/results/clientpositive/spark/subquery_in.q.out ql/src/test/results/clientpositive/spark/subquery_in.q.out
index 14129277a8..b55202a40b 100644
--- ql/src/test/results/clientpositive/spark/subquery_in.q.out
+++ ql/src/test/results/clientpositive/spark/subquery_in.q.out
@@ -333,7 +333,7 @@ STAGE PLANS:
   Windowing table definition
     input alias: ptf_1
     name: windowingtablefunction
-    order by: _col5 ASC NULLS FIRST
+    order by: _col5 ASC NULLS LAST
     partition by: _col2
     raw input shape:
     window functions:
@@ -512,7 +512,7 @@ STAGE PLANS:
   Windowing table definition
     input alias: ptf_1
     name: windowingtablefunction
-    order by: _col5 ASC NULLS FIRST
+    order by: _col5 ASC NULLS LAST
     partition by: _col2
     raw input shape:
     window functions:
diff --git ql/src/test/results/clientpositive/spark/subquery_notin.q.out ql/src/test/results/clientpositive/spark/subquery_notin.q.out
index f251835741..e1865eaa3e 100644
--- ql/src/test/results/clientpositive/spark/subquery_notin.q.out
+++ ql/src/test/results/clientpositive/spark/subquery_notin.q.out
@@ -416,7 +416,7 @@ STAGE PLANS:
   Windowing table definition
     input alias: ptf_1
     name: windowingtablefunction
-    order by: _col5 ASC NULLS FIRST
+    order by: _col5 ASC NULLS LAST
     partition by: _col2
     raw input shape:
     window functions:
@@ -478,7 +478,7 @@ STAGE PLANS:
   Windowing table definition
     input alias: ptf_1
     name: windowingtablefunction
-    order by: _col5 ASC NULLS FIRST
+    order by: _col5 ASC NULLS LAST
     partition by: _col2
     raw input shape:
     window functions:
@@ -687,7 +687,7 @@ STAGE PLANS:
   Windowing table definition
     input alias: ptf_1
     name: windowingtablefunction
-    order by: _col5 ASC NULLS FIRST
+    order by: _col5 ASC NULLS LAST
     partition by: _col2
     raw input shape:
     window functions:
@@ -752,7 +752,7 @@ STAGE PLANS:
   Windowing table definition
     input alias: ptf_1
     name: windowingtablefunction
-    order by: _col5 ASC NULLS FIRST
+    order by: _col5 ASC NULLS LAST
     partition by: _col2
     raw input shape:
     window functions:
@@ -956,7 +956,7 @@ STAGE PLANS:
   Windowing table definition
     input alias: ptf_1
     name: windowingtablefunction
-    order by: _col5 ASC NULLS FIRST
+    order by: _col5 ASC NULLS LAST
     partition by: _col2
     raw input shape:
     window functions:
@@ -1088,7 +1088,7 @@ STAGE PLANS:
   Windowing table definition
     input alias: ptf_1
     name: windowingtablefunction
-    order by: _col5 ASC NULLS FIRST
+    order by: _col5 ASC NULLS LAST
     partition by: _col2
     raw input shape:
     window functions:
@@ -1153,7 +1153,7 @@ STAGE PLANS:
   Windowing table definition
     input alias: ptf_1
     name: windowingtablefunction
-    order by: _col5 ASC NULLS FIRST
+    order by: _col5 ASC NULLS LAST
     partition by: _col2
     raw input shape:
     window functions:
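In the Map Join Vectorization hunks further up, the same rename pattern applies — bigTableKeyColumnNums/bigTableValueColumnNums become typed bigTableKeyColumns/bigTableValueColumns, smallTableMapping becomes smallTableValueMapping, and projectedOutputColumnNums becomes projectedOutput — while three fields are newly printed: bigTableRetainColumnNums, nonOuterSmallTableKeyMapping, and hashTableImplementationType (OPTIMIZED here, echoing hive.mapjoin.optimized.hashtable IS true in the conditions). A hedged sketch of a join whose plan carries such a block; t_big and t_small are illustrative names, not tables from this suite:

-- Sketch only: force a native vectorized map join, then inspect its detail output.
SET hive.auto.convert.join=true;
SET hive.mapjoin.optimized.hashtable=true;
SET hive.vectorized.execution.mapjoin.native.enabled=true;
EXPLAIN VECTORIZATION DETAIL
SELECT count(*) FROM t_big b JOIN t_small s ON (b.k = s.k);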
+1036,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1104,7 +1104,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/spark/union14.q.out ql/src/test/results/clientpositive/spark/union14.q.out index 6a95e4a9d8..49d6cb118f 100644 --- ql/src/test/results/clientpositive/spark/union14.q.out +++ ql/src/test/results/clientpositive/spark/union14.q.out @@ -126,20 +126,20 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 #### A masked pattern was here #### -278 1 -273 1 + 10 128 1 -255 1 -tst1 1 146 1 -369 1 +150 1 213 1 -311 1 224 1 238 1 -150 1 - 10 +255 1 +273 1 +278 1 +311 1 +369 1 +401 1 406 1 66 1 -401 1 98 1 +tst1 1 diff --git ql/src/test/results/clientpositive/spark/union7.q.out ql/src/test/results/clientpositive/spark/union7.q.out index 549075cf38..8556f841eb 100644 --- ql/src/test/results/clientpositive/spark/union7.q.out +++ ql/src/test/results/clientpositive/spark/union7.q.out @@ -122,20 +122,20 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Input: default@src1 #### A masked pattern was here #### -278 1 -273 1 + 10 128 1 -255 1 -tst1 1 146 1 -369 1 +150 1 213 1 -311 1 224 1 238 1 -150 1 - 10 +255 1 +273 1 +278 1 +311 1 +369 1 +401 1 406 1 66 1 -401 1 98 1 +tst1 1 diff --git ql/src/test/results/clientpositive/spark/union_null.q.out ql/src/test/results/clientpositive/spark/union_null.q.out index d37adbb8d8..696641cbd8 100644 --- ql/src/test/results/clientpositive/spark/union_null.q.out +++ ql/src/test/results/clientpositive/spark/union_null.q.out @@ -24,16 +24,16 @@ POSTHOOK: query: select x from (select * from (select value as x from src order POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -val_0 -val_0 -val_0 -val_10 -val_100 NULL NULL NULL NULL NULL +val_0 +val_0 +val_0 +val_10 +val_100 PREHOOK: query: select * from (select * from (select cast(null as string) as N from src1 group by key)a UNION ALL select * from (select cast(null as string) as N from src1 group by key)b ) a PREHOOK: type: QUERY PREHOOK: Input: default@src1 diff --git ql/src/test/results/clientpositive/spark/union_ppr.q.out ql/src/test/results/clientpositive/spark/union_ppr.q.out index f987e30762..950cbbb613 100644 --- ql/src/test/results/clientpositive/spark/union_ppr.q.out +++ ql/src/test/results/clientpositive/spark/union_ppr.q.out @@ -48,7 +48,7 @@ STAGE PLANS: Statistics: Num rows: 666 Data size: 7074 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string), '2008-04-08' (type: string), _col3 (type: string) - null sort order: aaaa + null sort order: zzzz sort order: ++++ Statistics: Num rows: 666 Data size: 7074 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -179,7 +179,7 @@ STAGE PLANS: Statistics: Num rows: 666 Data size: 7074 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string), '2008-04-08' (type: string), _col3 (type: string) - null sort order: aaaa + null sort order: zzzz sort order: ++++ Statistics: Num rows: 666 Data size: 7074 Basic stats: COMPLETE Column stats: NONE tag: -1 diff --git 
ql/src/test/results/clientpositive/spark/union_remove_6_subq.q.out ql/src/test/results/clientpositive/spark/union_remove_6_subq.q.out index e1f14d3ed7..3f2242b4c5 100644 --- ql/src/test/results/clientpositive/spark/union_remove_6_subq.q.out +++ ql/src/test/results/clientpositive/spark/union_remove_6_subq.q.out @@ -439,7 +439,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/spark/union_view.q.out ql/src/test/results/clientpositive/spark/union_view.q.out index 591ebfaace..97a5bef15b 100644 --- ql/src/test/results/clientpositive/spark/union_view.q.out +++ ql/src/test/results/clientpositive/spark/union_view.q.out @@ -483,10 +483,10 @@ STAGE PLANS: 86 val_86 2 86 val_86 3 86 val_86 3 -86 val_86 2 -86 val_86 2 86 val_86 3 86 val_86 3 +86 val_86 2 +86 val_86 2 86 val_86 1 STAGE DEPENDENCIES: Stage-1 is a root stage diff --git ql/src/test/results/clientpositive/spark/vector_between_in.q.out ql/src/test/results/clientpositive/spark/vector_between_in.q.out index afb76b0d71..b156ff4f7f 100644 --- ql/src/test/results/clientpositive/spark/vector_between_in.q.out +++ ql/src/test/results/clientpositive/spark/vector_between_in.q.out @@ -1615,9 +1615,9 @@ POSTHOOK: query: SELECT c0, count(1) from (SELECT cdate IN (CAST("1969-10-26" AS POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_date_test #### A masked pattern was here #### -NULL 6231 false 6041 true 17 +NULL 6231 PREHOOK: query: SELECT c0, count(1) from (SELECT cdecimal1 IN (2365.8945945946, 881.0135135135, -3367.6517567568) as c0 FROM decimal_date_test) tab GROUP BY c0 ORDER BY c0 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_date_test @@ -1626,9 +1626,9 @@ POSTHOOK: query: SELECT c0, count(1) from (SELECT cdecimal1 IN (2365.8945945946, POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_date_test #### A masked pattern was here #### -NULL 3115 false 9165 true 9 +NULL 3115 PREHOOK: query: SELECT c0, count(1) from (SELECT cdate BETWEEN CAST("1969-12-30" AS DATE) AND CAST("1970-01-02" AS DATE) as c0 FROM decimal_date_test) tab GROUP BY c0 ORDER BY c0 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_date_test @@ -1637,9 +1637,9 @@ POSTHOOK: query: SELECT c0, count(1) from (SELECT cdate BETWEEN CAST("1969-12-3 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_date_test #### A masked pattern was here #### -NULL 6231 false 5974 true 84 +NULL 6231 PREHOOK: query: SELECT c0, count(1) from (SELECT cdecimal1 NOT BETWEEN -2000 AND 4390.1351351351 as c0 FROM decimal_date_test) tab GROUP BY c0 ORDER BY c0 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_date_test @@ -1648,9 +1648,9 @@ POSTHOOK: query: SELECT c0, count(1) from (SELECT cdecimal1 NOT BETWEEN -2000 AN POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_date_test #### A masked pattern was here #### -NULL 3115 false 3002 true 6172 +NULL 3115 PREHOOK: query: SELECT c0, count(1) from (SELECT cdate IN (CAST("1969-10-26" AS DATE), CAST("1969-07-14" AS DATE)) as c0 FROM decimal_date_test) tab GROUP BY c0 ORDER BY c0 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_date_test @@ -1659,9 +1659,9 @@ POSTHOOK: query: SELECT c0, count(1) from (SELECT cdate IN (CAST("1969-10-26" AS POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_date_test #### A masked pattern was here #### -NULL 6231 false 6041 true 17 +NULL 6231 PREHOOK: query: SELECT c0, count(1) from (SELECT 
cdecimal1 IN (2365.8945945946, 881.0135135135, -3367.6517567568) as c0 FROM decimal_date_test) tab GROUP BY c0 ORDER BY c0 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_date_test @@ -1670,9 +1670,9 @@ POSTHOOK: query: SELECT c0, count(1) from (SELECT cdecimal1 IN (2365.8945945946, POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_date_test #### A masked pattern was here #### -NULL 3115 false 9165 true 9 +NULL 3115 PREHOOK: query: SELECT c0, count(1) from (SELECT cdate BETWEEN CAST("1969-12-30" AS DATE) AND CAST("1970-01-02" AS DATE) as c0 FROM decimal_date_test) tab GROUP BY c0 ORDER BY c0 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_date_test @@ -1681,9 +1681,9 @@ POSTHOOK: query: SELECT c0, count(1) from (SELECT cdate BETWEEN CAST("1969-12-3 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_date_test #### A masked pattern was here #### -NULL 6231 false 5974 true 84 +NULL 6231 PREHOOK: query: SELECT c0, count(1) from (SELECT cdecimal1 NOT BETWEEN -2000 AND 4390.1351351351 as c0 FROM decimal_date_test) tab GROUP BY c0 ORDER BY c0 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_date_test @@ -1692,6 +1692,6 @@ POSTHOOK: query: SELECT c0, count(1) from (SELECT cdecimal1 NOT BETWEEN -2000 AN POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_date_test #### A masked pattern was here #### -NULL 3115 false 3002 true 6172 +NULL 3115 diff --git ql/src/test/results/clientpositive/spark/vector_data_types.q.out ql/src/test/results/clientpositive/spark/vector_data_types.q.out index 6eacd69f1f..d368d054de 100644 --- ql/src/test/results/clientpositive/spark/vector_data_types.q.out +++ ql/src/test/results/clientpositive/spark/vector_data_types.q.out @@ -175,10 +175,6 @@ POSTHOOK: query: SELECT t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_ POSTHOOK: type: QUERY POSTHOOK: Input: default@over1korc_n1 #### A masked pattern was here #### -NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL 374 65560 4294967516 65.43 22.48 true oscar quirinius 2013-03-01 09:11:58.703316 16.86 mathematics -NULL 409 65536 4294967490 46.97 25.92 false fred miller 2013-03-01 09:11:58.703116 33.45 history -NULL 473 65720 4294967324 80.74 40.6 false holly falkner 2013-03-01 09:11:58.703111 18.80 mathematics -3 275 65622 4294967302 71.78 8.49 false wendy robinson 2013-03-01 09:11:58.703294 95.39 undecided -3 344 65733 4294967363 0.56 11.96 true rachel thompson 2013-03-01 09:11:58.703276 88.46 wind surfing -3 376 65548 4294967431 96.78 43.23 false fred ellison 2013-03-01 09:11:58.703233 75.39 education @@ -195,6 +191,10 @@ NULL 473 65720 4294967324 80.74 40.6 false holly falkner 2013-03-01 09:11:58.703 -2 461 65648 4294967425 58.52 24.85 false rachel thompson 2013-03-01 09:11:58.703318 85.62 zync studies -1 268 65778 4294967418 56.33 44.73 true calvin falkner 2013-03-01 09:11:58.70322 7.37 history -1 281 65643 4294967323 15.1 45.0 false irene nixon 2013-03-01 09:11:58.703223 80.96 undecided +-1 300 65663 4294967343 71.26 34.62 true calvin ovid 2013-03-01 09:11:58.703262 78.56 study skills +-1 348 65556 4294967413 35.17 9.51 false bob young 2013-03-01 09:11:58.70328 45.81 quiet hour +-1 372 65680 4294967490 15.45 18.09 false ethan laertes 2013-03-01 09:11:58.70311 65.88 opthamology +-1 417 65685 4294967492 28.89 5.19 true mike white 2013-03-01 09:11:58.703275 90.69 forestry PREHOOK: query: SELECT SUM(HASH(*)) FROM (SELECT t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY t, si, i) as q PREHOOK: type: QUERY @@ -308,10 +308,6 @@ POSTHOOK: query: SELECT t, si, i, b, f, 
d, bo, s, ts, `dec`, bin FROM over1korc_ POSTHOOK: type: QUERY POSTHOOK: Input: default@over1korc_n1 #### A masked pattern was here #### -NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL 374 65560 4294967516 65.43 22.48 true oscar quirinius 2013-03-01 09:11:58.703316 16.86 mathematics -NULL 409 65536 4294967490 46.97 25.92 false fred miller 2013-03-01 09:11:58.703116 33.45 history -NULL 473 65720 4294967324 80.74 40.6 false holly falkner 2013-03-01 09:11:58.703111 18.80 mathematics -3 275 65622 4294967302 71.78 8.49 false wendy robinson 2013-03-01 09:11:58.703294 95.39 undecided -3 344 65733 4294967363 0.56 11.96 true rachel thompson 2013-03-01 09:11:58.703276 88.46 wind surfing -3 376 65548 4294967431 96.78 43.23 false fred ellison 2013-03-01 09:11:58.703233 75.39 education @@ -328,6 +324,10 @@ NULL 473 65720 4294967324 80.74 40.6 false holly falkner 2013-03-01 09:11:58.703 -2 461 65648 4294967425 58.52 24.85 false rachel thompson 2013-03-01 09:11:58.703318 85.62 zync studies -1 268 65778 4294967418 56.33 44.73 true calvin falkner 2013-03-01 09:11:58.70322 7.37 history -1 281 65643 4294967323 15.1 45.0 false irene nixon 2013-03-01 09:11:58.703223 80.96 undecided +-1 300 65663 4294967343 71.26 34.62 true calvin ovid 2013-03-01 09:11:58.703262 78.56 study skills +-1 348 65556 4294967413 35.17 9.51 false bob young 2013-03-01 09:11:58.70328 45.81 quiet hour +-1 372 65680 4294967490 15.45 18.09 false ethan laertes 2013-03-01 09:11:58.70311 65.88 opthamology +-1 417 65685 4294967492 28.89 5.19 true mike white 2013-03-01 09:11:58.703275 90.69 forestry PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(HASH(*)) FROM (SELECT t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY t, si, i) as q diff --git ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out index e3d815b1cf..6e33ead051 100644 --- ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out +++ ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out @@ -99,10 +99,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2, 3, 4, 5, 6, 7, 8, 9] + valueColumns: 1:bigint, 2:decimal(20,10), 3:decimal(20,10), 4:decimal(30,10), 5:bigint, 6:decimal(23,14), 7:decimal(23,14), 8:decimal(33,14), 9:bigint Statistics: Num rows: 12289 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: bigint), _col6 (type: decimal(23,14)), _col7 (type: decimal(23,14)), _col8 (type: decimal(33,14)), _col9 (type: bigint) Execution mode: vectorized @@ -277,10 +277,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, 
LazyBinarySerDe for values IS true - valueColumnNums: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13] + valueColumns: 1:bigint, 2:decimal(20,10), 3:decimal(20,10), 4:decimal(30,10), 5:double, 6:double, 7:bigint, 8:decimal(23,14), 9:decimal(23,14), 10:decimal(33,14), 11:double, 12:double, 13:bigint Statistics: Num rows: 12289 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: double), _col6 (type: double), _col7 (type: bigint), _col8 (type: decimal(23,14)), _col9 (type: decimal(23,14)), _col10 (type: decimal(33,14)), _col11 (type: double), _col12 (type: double), _col13 (type: bigint) Execution mode: vectorized @@ -489,10 +489,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2, 3, 4, 5, 6, 7, 8, 9] + valueColumns: 1:bigint, 2:decimal(11,5), 3:decimal(11,5), 4:decimal(21,5), 5:bigint, 6:decimal(16,0), 7:decimal(16,0), 8:decimal(26,0), 9:bigint Statistics: Num rows: 12289 Data size: 346462 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint), _col2 (type: decimal(11,5)), _col3 (type: decimal(11,5)), _col4 (type: decimal(21,5)), _col5 (type: bigint), _col6 (type: decimal(16,0)), _col7 (type: decimal(16,0)), _col8 (type: decimal(26,0)), _col9 (type: bigint) Execution mode: vectorized @@ -686,10 +686,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13] + valueColumns: 1:bigint, 2:decimal(11,5), 3:decimal(11,5), 4:decimal(21,5), 5:double, 6:double, 7:bigint, 8:decimal(16,0), 9:decimal(16,0), 10:decimal(26,0), 11:double, 12:double, 13:bigint Statistics: Num rows: 12289 Data size: 346462 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint), _col2 (type: decimal(11,5)), _col3 (type: decimal(11,5)), _col4 (type: decimal(21,5)), _col5 (type: double), _col6 (type: double), _col7 (type: bigint), _col8 (type: decimal(16,0)), _col9 (type: decimal(16,0)), _col10 (type: decimal(26,0)), _col11 (type: double), _col12 (type: double), _col13 (type: bigint) Execution mode: vectorized diff --git ql/src/test/results/clientpositive/spark/vector_inner_join.q.out ql/src/test/results/clientpositive/spark/vector_inner_join.q.out index 54f08fe047..efab3f02c2 100644 --- ql/src/test/results/clientpositive/spark/vector_inner_join.q.out +++ ql/src/test/results/clientpositive/spark/vector_inner_join.q.out @@ -137,13 +137,15 @@ STAGE PLANS: 0 _col0 (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0] - bigTableValueColumnNums: [0] + bigTableKeyColumns: 0:int + bigTableRetainColumnNums: [0] + 
bigTableValueColumns: 0:int className: VectorMapJoinInnerBigOnlyLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0] + nonOuterSmallTableKeyMapping: [] + projectedOutput: 0:int + hashTableImplementationType: OPTIMIZED outputColumnNames: _col1 input vertices: 0 Map 1 @@ -319,13 +321,15 @@ STAGE PLANS: 0 _col0 (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0] - bigTableValueColumnNums: [0] + bigTableKeyColumns: 0:int + bigTableRetainColumnNums: [0] + bigTableValueColumns: 0:int className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0] + nonOuterSmallTableKeyMapping: [] + projectedOutput: 0:int + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0 input vertices: 1 Map 2 @@ -517,13 +521,15 @@ STAGE PLANS: 0 _col0 (type: int) 1 _col1 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0] + bigTableKeyColumns: 0:int + bigTableRetainColumnNums: [] className: VectorMapJoinInnerLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [3, 0] - smallTableMapping: [3] + nonOuterSmallTableKeyMapping: [0] + projectedOutput: 3:string, 0:int + smallTableValueMapping: 3:string + hashTableImplementationType: OPTIMIZED outputColumnNames: _col1, _col2 input vertices: 1 Map 2 @@ -687,14 +693,16 @@ STAGE PLANS: 0 _col1 (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [1] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0, 1] + bigTableKeyColumns: 1:int + bigTableRetainColumnNums: [0, 1] + bigTableValueColumns: 0:string, 1:int className: VectorMapJoinInnerLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1, 1, 3] - smallTableMapping: [3] + nonOuterSmallTableKeyMapping: [] + projectedOutput: 0:string, 1:int, 1:int, 3:string + smallTableValueMapping: 3:string + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col2, _col3 input vertices: 1 Map 2 @@ -850,14 +858,16 @@ STAGE PLANS: 0 _col0 (type: int) 1 _col1 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0, 1] + bigTableKeyColumns: 0:int + bigTableRetainColumnNums: [0, 1] + bigTableValueColumns: 0:int, 1:string className: 
VectorMapJoinInnerLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1, 3, 0] - smallTableMapping: [3] + nonOuterSmallTableKeyMapping: [] + projectedOutput: 0:int, 1:string, 3:string, 0:int + smallTableValueMapping: 3:string + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col2, _col3 input vertices: 1 Map 2 @@ -1022,14 +1032,16 @@ STAGE PLANS: 0 _col0 (type: int) 1 _col1 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0, 1] + bigTableKeyColumns: 0:int + bigTableRetainColumnNums: [0, 1] + bigTableValueColumns: 0:int, 1:string className: VectorMapJoinInnerLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1, 3] - smallTableMapping: [3] + nonOuterSmallTableKeyMapping: [] + projectedOutput: 0:int, 1:string, 3:string + smallTableValueMapping: 3:string + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col2 input vertices: 1 Map 2 @@ -1193,14 +1205,16 @@ STAGE PLANS: 0 _col0 (type: int) 1 _col1 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [1] + bigTableKeyColumns: 0:int + bigTableRetainColumnNums: [1] + bigTableValueColumns: 1:string className: VectorMapJoinInnerLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [1, 3, 0] - smallTableMapping: [3] + nonOuterSmallTableKeyMapping: [0] + projectedOutput: 1:string, 3:string, 0:int + smallTableValueMapping: 3:string + hashTableImplementationType: OPTIMIZED outputColumnNames: _col1, _col2, _col3 input vertices: 1 Map 2 @@ -1364,14 +1378,16 @@ STAGE PLANS: 0 _col1 (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [1] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0] + bigTableKeyColumns: 1:int + bigTableRetainColumnNums: [0] + bigTableValueColumns: 0:string className: VectorMapJoinInnerLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1, 3] - smallTableMapping: [3] + nonOuterSmallTableKeyMapping: [1] + projectedOutput: 0:string, 1:int, 3:string + smallTableValueMapping: 3:string + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col2, _col3 input vertices: 1 Map 2 @@ -1535,14 +1551,16 @@ STAGE PLANS: 0 _col1 (type: int) 1 _col0 (type: int) Map Join 
Vectorization: - bigTableKeyColumnNums: [1] - bigTableRetainedColumnNums: [0, 1] - bigTableValueColumnNums: [0, 1] + bigTableKeyColumns: 1:int + bigTableRetainColumnNums: [0, 1] + bigTableValueColumns: 0:string, 1:int className: VectorMapJoinInnerLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1, 3] - smallTableMapping: [3] + nonOuterSmallTableKeyMapping: [] + projectedOutput: 0:string, 1:int, 3:string + smallTableValueMapping: 3:string + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col3 input vertices: 1 Map 2 diff --git ql/src/test/results/clientpositive/spark/vector_mapjoin_reduce.q.out ql/src/test/results/clientpositive/spark/vector_mapjoin_reduce.q.out index 108d326e82..63d13fa107 100644 --- ql/src/test/results/clientpositive/spark/vector_mapjoin_reduce.q.out +++ ql/src/test/results/clientpositive/spark/vector_mapjoin_reduce.q.out @@ -206,6 +206,7 @@ STAGE PLANS: className: VectorMapJoinInnerLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col3 input vertices: 1 Map 3 @@ -220,6 +221,7 @@ STAGE PLANS: className: VectorMapJoinLeftSemiLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col3 input vertices: 1 Map 4 @@ -483,6 +485,7 @@ STAGE PLANS: className: VectorMapJoinInnerLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col3, _col4 input vertices: 1 Map 3 @@ -497,6 +500,7 @@ STAGE PLANS: className: VectorMapJoinLeftSemiMultiKeyOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col3 input vertices: 1 Map 4 diff --git ql/src/test/results/clientpositive/spark/vector_outer_join0.q.out ql/src/test/results/clientpositive/spark/vector_outer_join0.q.out index c36c9eccc6..a35a2df4a0 100644 --- ql/src/test/results/clientpositive/spark/vector_outer_join0.q.out +++ ql/src/test/results/clientpositive/spark/vector_outer_join0.q.out @@ -151,15 +151,16 @@ STAGE PLANS: 0 _col1 (type: 
int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [1] - bigTableOuterKeyMapping: 1 -> 3 - bigTableRetainedColumnNums: [0, 1, 3] - bigTableValueColumnNums: [0, 1] + bigTableKeyColumns: 1:int + bigTableRetainColumnNums: [0, 1] + bigTableValueColumns: 0:string, 1:int className: VectorMapJoinOuterLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1, 3, 4] - smallTableMapping: [4] + outerSmallTableKeyMapping: 1 -> 3 + projectedOutput: 0:string, 1:int, 3:int, 4:string + smallTableValueMapping: 4:string + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col2, _col3 input vertices: 1 Map 2 @@ -304,15 +305,16 @@ STAGE PLANS: 0 _col1 (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableOuterKeyMapping: 0 -> 4 - bigTableRetainedColumnNums: [0, 1, 4] - bigTableValueColumnNums: [0, 1] + bigTableKeyColumns: 0:int + bigTableRetainColumnNums: [0, 1] + bigTableValueColumns: 0:int, 1:string className: VectorMapJoinOuterLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [3, 4, 0, 1] - smallTableMapping: [3] + outerSmallTableKeyMapping: 0 -> 4 + projectedOutput: 3:string, 4:int, 0:int, 1:string + smallTableValueMapping: 3:string + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col2, _col3 input vertices: 0 Map 1 diff --git ql/src/test/results/clientpositive/spark/vector_outer_join1.q.out ql/src/test/results/clientpositive/spark/vector_outer_join1.q.out index ecac4da2e9..e8dc7440f9 100644 --- ql/src/test/results/clientpositive/spark/vector_outer_join1.q.out +++ ql/src/test/results/clientpositive/spark/vector_outer_join1.q.out @@ -120,11 +120,11 @@ POSTHOOK: query: select * from small_alltypesorc3a POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc3a POSTHOOK: Output: hdfs://### HDFS PATH ### -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +NULL -16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false +NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false +NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false +NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 
4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true +NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true PREHOOK: query: select * from small_alltypesorc4a PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc4a @@ -211,11 +211,11 @@ POSTHOOK: Output: hdfs://### HDFS PATH ### -64 -7196 NULL 658026952 -64.0 -7196.0 NULL 4tAur 1969-12-31 15:59:53.866 1969-12-31 15:59:58.174 NULL true -64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +NULL -16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false +NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false +NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false +NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true +NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true PREHOOK: query: explain vectorization detail select * from small_alltypesorc_a c @@ -246,7 +246,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: cd - Statistics: Num rows: 15 Data size: 3483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3538 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] @@ -257,7 +257,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] - Statistics: Num rows: 15 Data size: 3483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3538 Basic stats: COMPLETE Column stats: NONE Spark HashTable Sink Operator Spark Hash Table Sink Vectorization: className: VectorSparkHashTableSinkOperator @@ -292,7 +292,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: c - Statistics: Num rows: 15 Data size: 3483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3538 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] @@ 
-303,7 +303,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] - Statistics: Num rows: 15 Data size: 3483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3538 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join 0 to 1 @@ -311,25 +311,26 @@ STAGE PLANS: 0 _col2 (type: int) 1 _col2 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [2] - bigTableOuterKeyMapping: 2 -> 15 - bigTableRetainedColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 15] - bigTableValueColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] + bigTableKeyColumns: 2:int + bigTableRetainColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] + bigTableValueColumns: 0:tinyint, 1:smallint, 2:int, 3:bigint, 4:float, 5:double, 6:string, 7:string, 8:timestamp, 9:timestamp, 10:boolean, 11:boolean className: VectorMapJoinOuterLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24] - smallTableMapping: [13, 14, 16, 17, 18, 19, 20, 21, 22, 23, 24] + outerSmallTableKeyMapping: 2 -> 15 + projectedOutput: 0:tinyint, 1:smallint, 2:int, 3:bigint, 4:float, 5:double, 6:string, 7:string, 8:timestamp, 9:timestamp, 10:boolean, 11:boolean, 13:tinyint, 14:smallint, 15:int, 16:bigint, 17:float, 18:double, 19:string, 20:string, 21:timestamp, 22:timestamp, 23:boolean, 24:boolean + smallTableValueMapping: 13:tinyint, 14:smallint, 16:bigint, 17:float, 18:double, 19:string, 20:string, 21:timestamp, 22:timestamp, 23:boolean, 24:boolean + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23 input vertices: 1 Map 2 - Statistics: Num rows: 16 Data size: 3831 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 3891 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 16 Data size: 3831 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 3891 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -387,11 +388,11 @@ POSTHOOK: Output: hdfs://### HDFS PATH ### -64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL -64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -64 -6907 253665376 NULL -64.0 -6907.0 1cGVWH7n1QU NULL NULL 1969-12-31 15:59:53.66 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true 
NULL -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +NULL -16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false NULL -16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false +NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false +NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false +NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true +NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true PREHOOK: query: explain vectorization detail select c.ctinyint from small_alltypesorc_a c @@ -422,7 +423,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: hd - Statistics: Num rows: 15 Data size: 3483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3538 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] @@ -433,7 +434,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] - Statistics: Num rows: 15 Data size: 3483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3538 Basic stats: COMPLETE Column stats: NONE Spark HashTable Sink Operator Spark Hash Table Sink Vectorization: className: VectorSparkHashTableSinkOperator @@ -468,7 +469,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: c - Statistics: Num 
rows: 15 Data size: 3483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3538 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] @@ -479,7 +480,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] - Statistics: Num rows: 15 Data size: 3483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3538 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join 0 to 1 @@ -487,23 +488,24 @@ STAGE PLANS: 0 _col0 (type: tinyint) 1 _col0 (type: tinyint) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0] - bigTableValueColumnNums: [0] + bigTableKeyColumns: 0:tinyint + bigTableRetainColumnNums: [0] + bigTableValueColumns: 0:tinyint className: VectorMapJoinOuterLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0] + projectedOutput: 0:tinyint + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0 input vertices: 1 Map 2 - Statistics: Num rows: 16 Data size: 3831 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 3891 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 16 Data size: 3831 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 3891 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -688,7 +690,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: cd - Statistics: Num rows: 15 Data size: 3483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3538 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] @@ -699,7 +701,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [2] - Statistics: Num rows: 15 Data size: 3483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3538 Basic stats: COMPLETE Column stats: NONE Spark HashTable Sink Operator Spark Hash Table Sink Vectorization: className: VectorSparkHashTableSinkOperator @@ -729,7 +731,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: hd - Statistics: Num rows: 15 Data size: 3483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3538 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 
3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] @@ -740,7 +742,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] - Statistics: Num rows: 15 Data size: 3483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3538 Basic stats: COMPLETE Column stats: NONE Spark HashTable Sink Operator Spark Hash Table Sink Vectorization: className: VectorSparkHashTableSinkOperator @@ -777,7 +779,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: c - Statistics: Num rows: 15 Data size: 3483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3538 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] @@ -788,7 +790,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 2] - Statistics: Num rows: 15 Data size: 3483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3538 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join 0 to 1 @@ -796,17 +798,18 @@ STAGE PLANS: 0 _col1 (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [2] - bigTableRetainedColumnNums: [0] - bigTableValueColumnNums: [0] + bigTableKeyColumns: 2:int + bigTableRetainColumnNums: [0] + bigTableValueColumns: 0:tinyint className: VectorMapJoinOuterLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0] + projectedOutput: 0:tinyint + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0 input vertices: 1 Map 3 - Statistics: Num rows: 16 Data size: 3831 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 3891 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join 0 to 1 @@ -814,17 +817,18 @@ STAGE PLANS: 0 _col0 (type: tinyint) 1 _col0 (type: tinyint) Map Join Vectorization: - bigTableKeyColumnNums: [0] - bigTableRetainedColumnNums: [0] - bigTableValueColumnNums: [0] + bigTableKeyColumns: 0:tinyint + bigTableRetainColumnNums: [0] + bigTableValueColumns: 0:tinyint className: VectorMapJoinOuterLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [0] + projectedOutput: 0:tinyint + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0 input vertices: 1 Map 4 - Statistics: Num rows: 17 Data size: 4214 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 4280 Basic stats: COMPLETE Column stats: NONE 
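(The vectorization detail in these plans now reports columns as index:type pairs — bigTableKeyColumns, bigTableValueColumns, projectedOutput, smallTableValueMapping, and keyColumns/valueColumns on the reduce sinks — instead of the old bare index arrays, and adds a hashTableImplementationType line. A minimal HiveQL sketch for reproducing such a plan, mirroring the query in these hunks; alltypesorc is the standard test table, and the SET lines assume the usual vectorized map-join switches:

SET hive.vectorized.execution.enabled=true;
SET hive.auto.convert.join=true;
-- the DETAIL level prints the per-operator column mappings shown in these hunks
EXPLAIN VECTORIZATION DETAIL
SELECT c.ctinyint
FROM alltypesorc c
LEFT OUTER JOIN alltypesorc hd ON hd.ctinyint = c.ctinyint;
)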
Group By Operator aggregations: count(), sum(_col0) Group By Vectorization: @@ -841,10 +845,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1] + valueColumns: 0:bigint, 1:bigint Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint), _col1 (type: bigint) Execution mode: vectorized diff --git ql/src/test/results/clientpositive/spark/vector_outer_join2.q.out ql/src/test/results/clientpositive/spark/vector_outer_join2.q.out index 92ad63edea..217217c85f 100644 --- ql/src/test/results/clientpositive/spark/vector_outer_join2.q.out +++ ql/src/test/results/clientpositive/spark/vector_outer_join2.q.out @@ -94,11 +94,11 @@ POSTHOOK: query: select * from small_alltypesorc1a_n0 POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc1a_n0 POSTHOOK: Output: hdfs://### HDFS PATH ### -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +-51 NULL -1064981602 -1444011153 -51.0 NULL aY3tpnr6wfvmWMG0U881 2Ol4N3Ha0815Ej54lA2N 1969-12-31 16:00:08.451 NULL false false +-51 NULL -1065775394 -1331703092 -51.0 NULL aD88uS2N8DmqPlvjOa7F46i7 Ut8ka2o8iokF504065PYS 1969-12-31 16:00:08.451 NULL false true +-51 NULL -1066684273 2034191923 -51.0 NULL 2W4Kg220OcCy065HG60k6e D7GOQhc3qbAR6 1969-12-31 16:00:08.451 NULL false false +-51 NULL -1067683781 1750003656 -51.0 NULL IbgbUvP5 47x2I874 1969-12-31 16:00:08.451 NULL false true +-51 NULL -1071480828 -1401575336 -51.0 NULL aw724t8c5558x2xneC624 4uE7l74tESBiKfu7c8wM7GA 1969-12-31 16:00:08.451 NULL true true PREHOOK: query: select * from small_alltypesorc2a_n0 PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc2a_n0 @@ -120,11 +120,11 @@ POSTHOOK: query: select * from small_alltypesorc3a_n0 POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc3a_n0 POSTHOOK: Output: hdfs://### HDFS PATH ### -NULL -13166 626923679 NULL NULL -13166.0 821UdmGbkEf4j NULL 1969-12-31 15:59:55.089 1969-12-31 16:00:15.69 true NULL -NULL -14426 626923679 NULL NULL -14426.0 821UdmGbkEf4j NULL 1969-12-31 16:00:11.505 1969-12-31 16:00:13.309 true NULL -NULL -14847 626923679 NULL NULL -14847.0 821UdmGbkEf4j NULL 1969-12-31 16:00:00.612 1969-12-31 15:59:43.704 true NULL -NULL -15632 528534767 NULL NULL -15632.0 cvLH6Eat2yFsyy7p NULL NULL 1969-12-31 15:59:53.593 true NULL -NULL -15830 253665376 NULL NULL -15830.0 1cGVWH7n1QU NULL 1969-12-31 16:00:02.582 1969-12-31 16:00:00.518 true NULL +-64 -10462 626923679 NULL -64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL +-64 -15920 528534767 NULL -64.0 -15920.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:51.859 1969-12-31 16:00:14.468 true NULL 
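(The sample rows for the small_alltypesorc* tables shift in these hunks because the test setup builds them by selecting a handful of rows with ORDER BY ... LIMIT, so flipping the default null placement changes which rows survive the cut. A hedged sketch of the effect; column list and LIMIT value are illustrative:

-- with NULLs sorting last by default, an ascending sort no longer yields the NULL rows first,
-- so a LIMIT captures a different slice of alltypesorc
SELECT ctinyint, csmallint, cint FROM alltypesorc
ORDER BY ctinyint ASC, csmallint ASC LIMIT 5;
-- the previous selection can be recovered by requesting the old placement explicitly
SELECT ctinyint, csmallint, cint FROM alltypesorc
ORDER BY ctinyint ASC NULLS FIRST, csmallint ASC NULLS FIRST LIMIT 5;
)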
+-64 -6907 253665376 NULL -64.0 -6907.0 1cGVWH7n1QU NULL NULL 1969-12-31 15:59:53.66 true NULL +-64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL +-64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL PREHOOK: query: select * from small_alltypesorc4a_n0 PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc4a_n0 @@ -206,26 +206,26 @@ POSTHOOK: query: select * from small_alltypesorc_a_n0 POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc_a_n0 POSTHOOK: Output: hdfs://### HDFS PATH ### +-51 NULL -1064981602 -1444011153 -51.0 NULL aY3tpnr6wfvmWMG0U881 2Ol4N3Ha0815Ej54lA2N 1969-12-31 16:00:08.451 NULL false false +-51 NULL -1065775394 -1331703092 -51.0 NULL aD88uS2N8DmqPlvjOa7F46i7 Ut8ka2o8iokF504065PYS 1969-12-31 16:00:08.451 NULL false true +-51 NULL -1066684273 2034191923 -51.0 NULL 2W4Kg220OcCy065HG60k6e D7GOQhc3qbAR6 1969-12-31 16:00:08.451 NULL false false +-51 NULL -1067683781 1750003656 -51.0 NULL IbgbUvP5 47x2I874 1969-12-31 16:00:08.451 NULL false true +-51 NULL -1071480828 -1401575336 -51.0 NULL aw724t8c5558x2xneC624 4uE7l74tESBiKfu7c8wM7GA 1969-12-31 16:00:08.451 NULL true true -60 -200 NULL NULL -60.0 -200.0 NULL NULL 1969-12-31 16:00:11.996 1969-12-31 15:59:55.451 NULL NULL -61 -7196 NULL NULL -61.0 -7196.0 NULL 8Mlns2Tl6E0g 1969-12-31 15:59:44.823 1969-12-31 15:59:58.174 NULL false -61 -7196 NULL NULL -61.0 -7196.0 NULL fUJIN 1969-12-31 16:00:11.842 1969-12-31 15:59:58.174 NULL false -62 -7196 NULL NULL -62.0 -7196.0 NULL jf1Cw6qhkNToQuud 1969-12-31 16:00:12.388 1969-12-31 15:59:58.174 NULL false -62 -7196 NULL NULL -62.0 -7196.0 NULL yLiOchx5PfDTFdcMduBTg 1969-12-31 16:00:02.373 1969-12-31 15:59:58.174 NULL false +-64 -10462 626923679 NULL -64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL +-64 -15920 528534767 NULL -64.0 -15920.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:51.859 1969-12-31 16:00:14.468 true NULL +-64 -6907 253665376 NULL -64.0 -6907.0 1cGVWH7n1QU NULL NULL 1969-12-31 15:59:53.66 true NULL -64 -7196 NULL -1615920595 -64.0 -7196.0 NULL X5rDjl 1969-12-31 16:00:11.912 1969-12-31 15:59:58.174 NULL false -64 -7196 NULL -1639157869 -64.0 -7196.0 NULL IJ0Oj7qAiqNGsN7gn 1969-12-31 16:00:01.785 1969-12-31 15:59:58.174 NULL false -64 -7196 NULL -527203677 -64.0 -7196.0 NULL JBE4H5RoK412Cs260I72 1969-12-31 15:59:50.184 1969-12-31 15:59:58.174 NULL true -64 -7196 NULL 406535485 -64.0 -7196.0 NULL E011i 1969-12-31 15:59:56.048 1969-12-31 15:59:58.174 NULL false -64 -7196 NULL 658026952 -64.0 -7196.0 NULL 4tAur 1969-12-31 15:59:53.866 1969-12-31 15:59:58.174 NULL true -NULL -13166 626923679 NULL NULL -13166.0 821UdmGbkEf4j NULL 1969-12-31 15:59:55.089 1969-12-31 16:00:15.69 true NULL -NULL -14426 626923679 NULL NULL -14426.0 821UdmGbkEf4j NULL 1969-12-31 16:00:11.505 1969-12-31 16:00:13.309 true NULL -NULL -14847 626923679 NULL NULL -14847.0 821UdmGbkEf4j NULL 1969-12-31 16:00:00.612 1969-12-31 15:59:43.704 true NULL -NULL -15632 528534767 NULL NULL -15632.0 cvLH6Eat2yFsyy7p NULL NULL 1969-12-31 15:59:53.593 true NULL -NULL -15830 253665376 NULL NULL -15830.0 1cGVWH7n1QU NULL 1969-12-31 16:00:02.582 1969-12-31 16:00:00.518 true NULL -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 
-1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +-64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL +-64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL PREHOOK: query: explain vectorization detail select count(*), sum(t1.c_cbigint) from (select c.cbigint as c_cbigint from small_alltypesorc_a_n0 c @@ -262,7 +262,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: cd - Statistics: Num rows: 20 Data size: 4431 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 20 Data size: 4531 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] @@ -273,7 +273,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [2] - Statistics: Num rows: 20 Data size: 4431 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 20 Data size: 4531 Basic stats: COMPLETE Column stats: NONE Spark HashTable Sink Operator Spark Hash Table Sink Vectorization: className: VectorSparkHashTableSinkOperator @@ -303,7 +303,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: hd - Statistics: Num rows: 20 Data size: 4431 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 20 Data size: 4531 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] @@ -314,7 +314,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [3] - Statistics: Num rows: 20 Data size: 4431 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 20 Data size: 4531 Basic stats: COMPLETE Column stats: NONE Spark HashTable Sink Operator Spark Hash Table Sink Vectorization: className: VectorSparkHashTableSinkOperator @@ -351,7 +351,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: c - Statistics: Num rows: 20 Data size: 4431 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 20 Data size: 4531 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] @@ -362,7 +362,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [2, 3] - Statistics: Num rows: 20 Data size: 4431 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 20 Data size: 4531 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join 0 to 1 @@ -370,17 +370,18 @@ STAGE 
PLANS: 0 _col0 (type: int) 1 _col0 (type: int) Map Join Vectorization: - bigTableKeyColumnNums: [2] - bigTableRetainedColumnNums: [3] - bigTableValueColumnNums: [3] + bigTableKeyColumns: 2:int + bigTableRetainColumnNums: [3] + bigTableValueColumns: 3:bigint className: VectorMapJoinOuterLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [3] + projectedOutput: 3:bigint + hashTableImplementationType: OPTIMIZED outputColumnNames: _col1 input vertices: 1 Map 3 - Statistics: Num rows: 22 Data size: 4874 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 4984 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join 0 to 1 @@ -388,17 +389,18 @@ STAGE PLANS: 0 _col1 (type: bigint) 1 _col0 (type: bigint) Map Join Vectorization: - bigTableKeyColumnNums: [3] - bigTableRetainedColumnNums: [3] - bigTableValueColumnNums: [3] + bigTableKeyColumns: 3:bigint + bigTableRetainColumnNums: [3] + bigTableValueColumns: 3:bigint className: VectorMapJoinOuterLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true - projectedOutputColumnNums: [3] + projectedOutput: 3:bigint + hashTableImplementationType: OPTIMIZED outputColumnNames: _col1 input vertices: 1 Map 4 - Statistics: Num rows: 24 Data size: 5361 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 24 Data size: 5482 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(), sum(_col1) Group By Vectorization: @@ -415,10 +417,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1] + valueColumns: 0:bigint, 1:bigint Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint), _col1 (type: bigint) Execution mode: vectorized @@ -504,4 +505,4 @@ left outer join small_alltypesorc_a_n0 hd POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc_a_n0 POSTHOOK: Output: hdfs://### HDFS PATH ### -34 -26289186744 +24 -3110813706 diff --git ql/src/test/results/clientpositive/spark/vector_outer_join3.q.out ql/src/test/results/clientpositive/spark/vector_outer_join3.q.out index a226f448c4..e742ff31f5 100644 --- ql/src/test/results/clientpositive/spark/vector_outer_join3.q.out +++ ql/src/test/results/clientpositive/spark/vector_outer_join3.q.out @@ -94,11 +94,11 @@ POSTHOOK: query: select * from small_alltypesorc1a_n1 POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc1a_n1 POSTHOOK: Output: hdfs://### HDFS PATH ### -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 
15:59:45.854 false false -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +-64 -10462 626923679 NULL -64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL +-64 -15920 528534767 NULL -64.0 -15920.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:51.859 1969-12-31 16:00:14.468 true NULL +-64 -6907 253665376 NULL -64.0 -6907.0 1cGVWH7n1QU NULL NULL 1969-12-31 15:59:53.66 true NULL +-64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL +-64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL PREHOOK: query: select * from small_alltypesorc2a_n1 PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc2a_n1 @@ -216,16 +216,16 @@ POSTHOOK: Output: hdfs://### HDFS PATH ### -51 NULL NULL -1874052220 -51.0 NULL c61B47I604gymFJ sjWQS78 1969-12-31 16:00:08.451 NULL false false -51 NULL NULL -1927203921 -51.0 NULL 45ja5suO 42S0I0 1969-12-31 16:00:08.451 NULL true true -51 NULL NULL -1970551565 -51.0 NULL r2uhJH3 loXMWyrHjVeK 1969-12-31 16:00:08.451 NULL false false +-64 -10462 626923679 NULL -64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL +-64 -15920 528534767 NULL -64.0 -15920.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:51.859 1969-12-31 16:00:14.468 true NULL +-64 -6907 253665376 NULL -64.0 -6907.0 1cGVWH7n1QU NULL NULL 1969-12-31 15:59:53.66 true NULL -64 -7196 NULL -1615920595 -64.0 -7196.0 NULL X5rDjl 1969-12-31 16:00:11.912 1969-12-31 15:59:58.174 NULL false -64 -7196 NULL -1639157869 -64.0 -7196.0 NULL IJ0Oj7qAiqNGsN7gn 1969-12-31 16:00:01.785 1969-12-31 15:59:58.174 NULL false -64 -7196 NULL -527203677 -64.0 -7196.0 NULL JBE4H5RoK412Cs260I72 1969-12-31 15:59:50.184 1969-12-31 15:59:58.174 NULL true -64 -7196 NULL 406535485 -64.0 -7196.0 NULL E011i 1969-12-31 15:59:56.048 1969-12-31 15:59:58.174 NULL false -64 -7196 NULL 658026952 -64.0 -7196.0 NULL 4tAur 1969-12-31 15:59:53.866 1969-12-31 15:59:58.174 NULL true -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +-64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL +-64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL PREHOOK: query: explain vectorization detail formatted select count(*) from (select c.cstring1 from small_alltypesorc_a_n1 c @@ -265,7 +265,7 @@ left outer join small_alltypesorc_a_n1 hd POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc_a_n1 
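(The count flips just below — 20 becomes 32, then 28 becomes 24 — track the new hive.default.nulls.last=true default added earlier in this patch rather than any change in join logic, as far as these golden files show. The small_alltypesorc* fixture tables are populated with ORDER BY ... LIMIT queries, so flipping the default null ordering changes which rows land in the fixtures and therefore the outer-join counts. A minimal sketch, assuming the fixture DDL follows the usual q-file pattern — the exact column list is an assumption; table and column names are taken from the output above:

  -- With hive.default.nulls.last=true, NULL ctinyint rows now sort last,
  -- so a LIMIT 5 fixture picks non-NULL rows where it used to pick NULLs:
  CREATE TABLE small_alltypesorc1a_n1 AS
    SELECT * FROM alltypesorc ORDER BY ctinyint, csmallint LIMIT 5;
  -- The pre-patch selection can be restated explicitly:
  SELECT * FROM alltypesorc
  ORDER BY ctinyint NULLS FIRST, csmallint NULLS FIRST LIMIT 5;
)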
POSTHOOK: Output: hdfs://### HDFS PATH ### -20 +32 PREHOOK: query: explain vectorization detail formatted select count(*) from (select c.cstring1 from small_alltypesorc_a_n1 c @@ -305,7 +305,7 @@ left outer join small_alltypesorc_a_n1 hd POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc_a_n1 POSTHOOK: Output: hdfs://### HDFS PATH ### -28 +24 PREHOOK: query: explain vectorization detail formatted select count(*) from (select c.cstring1 from small_alltypesorc_a_n1 c @@ -345,4 +345,4 @@ left outer join small_alltypesorc_a_n1 hd POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc_a_n1 POSTHOOK: Output: hdfs://### HDFS PATH ### -28 +24 diff --git ql/src/test/results/clientpositive/spark/vector_outer_join4.q.out ql/src/test/results/clientpositive/spark/vector_outer_join4.q.out index 9be72eaeb3..70f7401b35 100644 --- ql/src/test/results/clientpositive/spark/vector_outer_join4.q.out +++ ql/src/test/results/clientpositive/spark/vector_outer_join4.q.out @@ -130,16 +130,16 @@ POSTHOOK: query: select * from small_alltypesorc3b POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc3b POSTHOOK: Output: hdfs://### HDFS PATH ### -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -609074876 -1887561756 NULL NULL EcM71 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:55.061 true false -NULL NULL -700300206 -1887561756 NULL NULL kdqQE010 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:58.384 false false -NULL NULL -726473298 1864027286 NULL NULL OFy1a1xf37f75b5N 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:11.799 true true -NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.55 true false -NULL NULL -838810013 1864027286 NULL NULL N016jPED08o 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:44.252 false true -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +NULL -16269 -378213344 -1645852809 NULL -16269.0 sOdj1Tmvbl03f xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:15.867 false false +NULL -16274 -671342269 -1645852809 NULL -16274.0 3DE7EQo4KyT0hS xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:51.469 false false +NULL -16296 -146635689 -1645852809 NULL -16296.0 r251rbt884txX2MNq4MM14 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:44.809 false false +NULL -16296 593429004 -1887561756 NULL -16296.0 dhDYJ076SFcC 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:47.422 false false +NULL -16300 -860437234 -1645852809 NULL -16300.0 Fb2W1r24opqN8m6571p xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:45.815 true false +NULL -16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false +NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false +NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false +NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true +NULL -16379 -894716315 1864027286 NULL -16379.0 
2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true PREHOOK: query: select * from small_alltypesorc4b PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc4b @@ -236,16 +236,16 @@ POSTHOOK: Output: hdfs://### HDFS PATH ### -64 -7196 NULL 658026952 -64.0 -7196.0 NULL 4tAur 1969-12-31 15:59:53.866 1969-12-31 15:59:58.174 NULL true -64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -609074876 -1887561756 NULL NULL EcM71 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:55.061 true false -NULL NULL -700300206 -1887561756 NULL NULL kdqQE010 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:58.384 false false -NULL NULL -726473298 1864027286 NULL NULL OFy1a1xf37f75b5N 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:11.799 true true -NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.55 true false -NULL NULL -838810013 1864027286 NULL NULL N016jPED08o 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:44.252 false true -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +NULL -16269 -378213344 -1645852809 NULL -16269.0 sOdj1Tmvbl03f xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:15.867 false false +NULL -16274 -671342269 -1645852809 NULL -16274.0 3DE7EQo4KyT0hS xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:51.469 false false +NULL -16296 -146635689 -1645852809 NULL -16296.0 r251rbt884txX2MNq4MM14 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:44.809 false false +NULL -16296 593429004 -1887561756 NULL -16296.0 dhDYJ076SFcC 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:47.422 false false +NULL -16300 -860437234 -1645852809 NULL -16300.0 Fb2W1r24opqN8m6571p xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:45.815 true false +NULL -16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false +NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false +NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false +NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true +NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true PREHOOK: query: explain vectorization detail formatted select * from small_alltypesorc_b c @@ -317,16 +317,16 @@ POSTHOOK: Output: hdfs://### HDFS PATH ### -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -64 -3097 253665376 NULL -64.0 -3097.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.013 1969-12-31 16:00:06.097 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -64 -6907 
253665376 NULL -64.0 -6907.0 1cGVWH7n1QU NULL NULL 1969-12-31 15:59:53.66 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -609074876 -1887561756 NULL NULL EcM71 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:55.061 true false NULL NULL -609074876 -1887561756 NULL NULL EcM71 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:55.061 true false -NULL NULL -700300206 -1887561756 NULL NULL kdqQE010 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:58.384 false false NULL NULL -700300206 -1887561756 NULL NULL kdqQE010 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:58.384 false false -NULL NULL -726473298 1864027286 NULL NULL OFy1a1xf37f75b5N 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:11.799 true true NULL NULL -726473298 1864027286 NULL NULL OFy1a1xf37f75b5N 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:11.799 true true -NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.55 true false NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.55 true false -NULL NULL -838810013 1864027286 NULL NULL N016jPED08o 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:44.252 false true NULL NULL -838810013 1864027286 NULL NULL N016jPED08o 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:44.252 false true -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +NULL -16269 -378213344 -1645852809 NULL -16269.0 sOdj1Tmvbl03f xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:15.867 false false NULL -16269 -378213344 -1645852809 NULL -16269.0 sOdj1Tmvbl03f xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:15.867 false false +NULL -16274 -671342269 -1645852809 NULL -16274.0 3DE7EQo4KyT0hS xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:51.469 false false NULL -16274 -671342269 -1645852809 NULL -16274.0 3DE7EQo4KyT0hS xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:51.469 false false +NULL -16296 -146635689 -1645852809 NULL -16296.0 r251rbt884txX2MNq4MM14 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:44.809 false false NULL -16296 -146635689 -1645852809 NULL -16296.0 r251rbt884txX2MNq4MM14 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:44.809 false false +NULL -16296 593429004 -1887561756 NULL -16296.0 dhDYJ076SFcC 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 
15:59:47.422 false false NULL -16296 593429004 -1887561756 NULL -16296.0 dhDYJ076SFcC 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:47.422 false false +NULL -16300 -860437234 -1645852809 NULL -16300.0 Fb2W1r24opqN8m6571p xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:45.815 true false NULL -16300 -860437234 -1645852809 NULL -16300.0 Fb2W1r24opqN8m6571p xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:45.815 true false +NULL -16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false NULL -16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false +NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false +NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false +NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true +NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true PREHOOK: query: explain vectorization detail formatted select c.ctinyint from small_alltypesorc_b c diff --git ql/src/test/results/clientpositive/spark/vector_string_concat.q.out ql/src/test/results/clientpositive/spark/vector_string_concat.q.out index d8beb2050d..2946a02a10 100644 --- ql/src/test/results/clientpositive/spark/vector_string_concat.q.out +++ ql/src/test/results/clientpositive/spark/vector_string_concat.q.out @@ -468,7 +468,6 @@ POSTHOOK: query: SELECT CONCAT(CONCAT(CONCAT('Quarter ',CAST(CAST((MONTH(dt) - 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@vectortab2korc_n0 #### A masked pattern was here #### -NULL Quarter 1-1970 Quarter 1-1971 Quarter 1-1972 @@ -518,3 +517,4 @@ Quarter 1-2015 Quarter 1-2016 Quarter 1-2017 Quarter 1-2018 +Quarter 1-2019 diff --git ql/src/test/results/clientpositive/spark/vectorization_0.q.out ql/src/test/results/clientpositive/spark/vectorization_0.q.out index bd881879f8..6b09206537 100644 --- ql/src/test/results/clientpositive/spark/vectorization_0.q.out +++ ql/src/test/results/clientpositive/spark/vectorization_0.q.out @@ -62,10 +62,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3] + valueColumns: 0:tinyint, 1:tinyint, 2:bigint, 3:bigint Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: tinyint), _col1 (type: tinyint), _col2 (type: bigint), _col3 (type: bigint) Execution mode: vectorized @@ -117,10 +116,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - 
keyColumnNums: [0] + keyColumns: 0:tinyint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2, 3] + valueColumns: 1:tinyint, 2:bigint, 3:bigint Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: tinyint), _col2 (type: bigint), _col3 (type: bigint) Reducer 3 @@ -128,7 +127,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -241,10 +240,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized @@ -296,17 +294,16 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE Reducer 3 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -568,10 +565,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3] + valueColumns: 0:bigint, 1:bigint, 2:bigint, 3:bigint Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: bigint), _col3 (type: bigint) Execution mode: vectorized @@ -623,10 +619,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2, 3] + valueColumns: 1:bigint, 2:bigint, 3:bigint Statistics: 
Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint), _col2 (type: bigint), _col3 (type: bigint) Reducer 3 @@ -634,7 +630,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -747,10 +743,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized @@ -802,17 +797,16 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:bigint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE Reducer 3 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -1074,10 +1068,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3] + valueColumns: 0:float, 1:float, 2:bigint, 3:bigint Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: float), _col1 (type: float), _col2 (type: bigint), _col3 (type: bigint) Execution mode: vectorized @@ -1129,10 +1122,10 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:float native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2, 3] + valueColumns: 1:float, 2:bigint, 3:bigint Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: float), _col2 (type: bigint), _col3 (type: bigint) Reducer 3 @@ -1140,7 +1133,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: a + 
reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -1253,10 +1246,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:double Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: double) Execution mode: vectorized @@ -1308,17 +1300,16 @@ STAGE PLANS: sort order: + Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0] + keyColumns: 0:double native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE Reducer 3 Execution mode: vectorized Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: a + reduceColumnNullOrder: z reduceColumnSortOrder: + allNative: false usesVectorUDFAdaptor: false @@ -1627,10 +1618,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3, 4, 5, 6] + valueColumns: 0:bigint, 1:bigint, 2:double, 3:double, 4:bigint, 5:double, 6:tinyint Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: double), _col3 (type: double), _col4 (type: bigint), _col5 (type: double), _col6 (type: tinyint) Execution mode: vectorized @@ -31049,7 +31039,7 @@ STAGE PLANS: Statistics: Num rows: 6144 Data size: 1453997 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col1 (type: string) - null sort order: a + null sort order: z sort order: + Statistics: Num rows: 6144 Data size: 1453997 Basic stats: COMPLETE Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/spark/vectorization_1.q.out ql/src/test/results/clientpositive/spark/vectorization_1.q.out index e6d6e64c16..e933da7147 100644 --- ql/src/test/results/clientpositive/spark/vectorization_1.q.out +++ ql/src/test/results/clientpositive/spark/vectorization_1.q.out @@ -96,10 +96,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + valueColumns: 0:double, 1:double, 2:bigint, 3:double, 4:tinyint, 5:int, 6:double, 7:double, 
8:bigint, 9:bigint Statistics: Num rows: 1 Data size: 72 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: double), _col1 (type: double), _col2 (type: bigint), _col3 (type: double), _col4 (type: tinyint), _col5 (type: int), _col6 (type: double), _col7 (type: double), _col8 (type: bigint), _col9 (type: bigint) Execution mode: vectorized diff --git ql/src/test/results/clientpositive/spark/vectorization_12.q.out ql/src/test/results/clientpositive/spark/vectorization_12.q.out index 36f474dcbf..53902c4b86 100644 --- ql/src/test/results/clientpositive/spark/vectorization_12.q.out +++ ql/src/test/results/clientpositive/spark/vectorization_12.q.out @@ -123,10 +123,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: double), _col1 (type: bigint), _col2 (type: string), _col3 (type: boolean) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2, 3] + keyColumns: 0:double, 1:bigint, 2:string, 3:boolean native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [4, 5, 6, 7, 8, 9, 10] + valueColumns: 4:bigint, 5:double, 6:double, 7:double, 8:bigint, 9:bigint, 10:double Statistics: Num rows: 3754 Data size: 888395 Basic stats: COMPLETE Column stats: NONE value expressions: _col4 (type: bigint), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: bigint), _col9 (type: bigint), _col10 (type: double) Execution mode: vectorized @@ -189,10 +189,10 @@ STAGE PLANS: sort order: +++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:double, 1:bigint, 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3, 11, 12, 4, 13, 14, 19, 15, 20, 22, 24, 9, 26, 25, 21, 27] + valueColumns: 3:boolean, 11:double, 12:bigint, 4:bigint, 13:bigint, 14:double, 19:double, 15:double, 20:double, 22:double, 24:decimal(22,2), 9:bigint, 26:double, 25:double, 21:double, 27:double Statistics: Num rows: 1877 Data size: 444197 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: boolean), _col4 (type: double), _col5 (type: bigint), _col6 (type: bigint), _col7 (type: bigint), _col8 (type: double), _col9 (type: double), _col10 (type: double), _col11 (type: double), _col12 (type: double), _col13 (type: decimal(22,2)), _col14 (type: bigint), _col15 (type: double), _col17 (type: double), _col18 (type: double), _col19 (type: double) Reducer 3 @@ -200,7 +200,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aaa + reduceColumnNullOrder: zzz reduceColumnSortOrder: +++ allNative: false usesVectorUDFAdaptor: false diff --git ql/src/test/results/clientpositive/spark/vectorization_13.q.out ql/src/test/results/clientpositive/spark/vectorization_13.q.out index 4d1fd3f557..a49738ecaa 100644 --- ql/src/test/results/clientpositive/spark/vectorization_13.q.out +++ ql/src/test/results/clientpositive/spark/vectorization_13.q.out @@ -125,10 +125,10 @@ STAGE PLANS: 
Map-reduce partition columns: _col0 (type: boolean), _col1 (type: tinyint), _col2 (type: timestamp), _col3 (type: float), _col4 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2, 3, 4] + keyColumns: 0:boolean, 1:tinyint, 2:timestamp, 3:float, 4:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [5, 6, 7, 8, 9, 10, 11, 12, 13, 14] + valueColumns: 5:tinyint, 6:double, 7:double, 8:double, 9:bigint, 10:double, 11:double, 12:bigint, 13:float, 14:tinyint Statistics: Num rows: 2730 Data size: 646063 Basic stats: COMPLETE Column stats: NONE value expressions: _col5 (type: tinyint), _col6 (type: double), _col7 (type: double), _col8 (type: double), _col9 (type: bigint), _col10 (type: double), _col11 (type: double), _col12 (type: bigint), _col13 (type: float), _col14 (type: tinyint) Execution mode: vectorized @@ -191,10 +191,9 @@ STAGE PLANS: sort order: +++++++++++++++++++++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2, 3, 4, 15, 5, 17, 6, 20, 19, 21, 22, 23, 24, 27, 28, 25, 13, 31, 14] + keyColumns: 0:boolean, 1:tinyint, 2:timestamp, 3:float, 4:string, 15:tinyint, 5:tinyint, 17:tinyint, 6:double, 20:double, 19:double, 21:float, 22:double, 23:double, 24:double, 27:decimal(7,3), 28:double, 25:double, 13:float, 31:double, 14:tinyint native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 1365 Data size: 323031 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 Reducer 3 @@ -202,7 +201,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aaaaaaaaaaaaaaaaaaaaa + reduceColumnNullOrder: zzzzzzzzzzzzzzzzzzzzz reduceColumnSortOrder: +++++++++++++++++++++ allNative: false usesVectorUDFAdaptor: false @@ -312,46 +311,46 @@ LIMIT 40 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL -55 1969-12-31 16:00:11.38 -55.0 NULL 55 -55 0 -55.0 -0.0 55.0 -4375.415 0.0 55.0 0.0 -10.175 -55.0 0.47781818181818186 -55.0 0.0 -55 -NULL -55 1969-12-31 16:00:11.751 -55.0 NULL 55 -55 0 -55.0 -0.0 55.0 -4375.415 0.0 55.0 0.0 -10.175 -55.0 0.47781818181818186 -55.0 0.0 -55 -NULL -56 1969-12-31 16:00:13.602 -56.0 NULL 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 -NULL -56 1969-12-31 16:00:13.958 -56.0 NULL 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 -NULL -56 1969-12-31 16:00:15.038 -56.0 NULL 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 -NULL -57 1969-12-31 16:00:11.451 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 -NULL -57 1969-12-31 16:00:11.883 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 -NULL -57 1969-12-31 16:00:12.626 -57.0 NULL 57 -57 0 
-57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 -NULL -57 1969-12-31 16:00:13.578 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 -NULL -57 1969-12-31 16:00:15.39 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 -NULL -58 1969-12-31 16:00:12.065 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 -NULL -58 1969-12-31 16:00:12.683 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 -NULL -58 1969-12-31 16:00:12.948 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 -NULL -58 1969-12-31 16:00:14.066 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 -NULL -58 1969-12-31 16:00:15.658 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 -NULL -59 1969-12-31 16:00:12.008 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 -NULL -59 1969-12-31 16:00:13.15 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 -NULL -59 1969-12-31 16:00:13.625 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 -NULL -59 1969-12-31 16:00:15.296 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 -NULL -59 1969-12-31 16:00:15.861 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 -NULL -60 1969-12-31 16:00:11.504 -60.0 NULL 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 -NULL -60 1969-12-31 16:00:11.641 -60.0 NULL 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 -NULL -60 1969-12-31 16:00:11.996 -60.0 NULL 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 -NULL -60 1969-12-31 16:00:12.779 -60.0 NULL 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 -NULL -61 1969-12-31 16:00:11.842 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:12.454 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:14.192 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:16.558 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -62 1969-12-31 16:00:12.388 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:12.591 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.154 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.247 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.517 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 
16:00:14.965 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -63 1969-12-31 16:00:11.946 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:12.188 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:15.436 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -64 1969-12-31 16:00:11.912 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:12.339 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:13.274 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -55 1969-12-31 16:00:12.297 -55.0 1cGVWH7n1QU 55 -55 0 -55.0 -0.0 55.0 -4375.415 0.0 55.0 0.0 -10.175 -55.0 0.47781818181818186 -55.0 0.0 -55 +true -55 1969-12-31 16:00:13.15 -55.0 1cGVWH7n1QU 55 -55 0 -55.0 -0.0 55.0 -4375.415 0.0 55.0 0.0 -10.175 -55.0 0.47781818181818186 -55.0 0.0 -55 +true -56 1969-12-31 16:00:11.242 -56.0 cvLH6Eat2yFsyy7p 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 +true -56 1969-12-31 16:00:13.534 -56.0 1cGVWH7n1QU 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 +true -56 1969-12-31 16:00:14.038 -56.0 1cGVWH7n1QU 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 +true -56 1969-12-31 16:00:14.689 -56.0 cvLH6Eat2yFsyy7p 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 +true -56 1969-12-31 16:00:16.37 -56.0 cvLH6Eat2yFsyy7p 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 +true -57 1969-12-31 16:00:11.534 -57.0 cvLH6Eat2yFsyy7p 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 +true -57 1969-12-31 16:00:13.365 -57.0 1cGVWH7n1QU 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 +true -57 1969-12-31 16:00:14.225 -57.0 821UdmGbkEf4j 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 +true -58 1969-12-31 16:00:12.918 -58.0 cvLH6Eat2yFsyy7p 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 +true -58 1969-12-31 16:00:13.209 -58.0 cvLH6Eat2yFsyy7p 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 +true -58 1969-12-31 16:00:14.933 -58.0 cvLH6Eat2yFsyy7p 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 +true -59 1969-12-31 16:00:11.065 -59.0 821UdmGbkEf4j 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:11.109 -59.0 1cGVWH7n1QU 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:11.231 -59.0 821UdmGbkEf4j 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:11.758 -59.0 821UdmGbkEf4j 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:12.227 -59.0 cvLH6Eat2yFsyy7p 59 -59 0 -59.0 
-0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:15.242 -59.0 821UdmGbkEf4j 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:15.278 -59.0 cvLH6Eat2yFsyy7p 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:16.069 -59.0 cvLH6Eat2yFsyy7p 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:16.125 -59.0 cvLH6Eat2yFsyy7p 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -60 1969-12-31 16:00:11.849 -60.0 1cGVWH7n1QU 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -60 1969-12-31 16:00:12.223 -60.0 1cGVWH7n1QU 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -60 1969-12-31 16:00:12.291 -60.0 821UdmGbkEf4j 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -60 1969-12-31 16:00:13.567 -60.0 821UdmGbkEf4j 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -60 1969-12-31 16:00:15.188 -60.0 cvLH6Eat2yFsyy7p 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -60 1969-12-31 16:00:16.165 -60.0 cvLH6Eat2yFsyy7p 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -61 1969-12-31 16:00:12.045 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:12.75 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:15.325 -61.0 821UdmGbkEf4j 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:15.694 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -62 1969-12-31 16:00:13.677 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:14.872 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:15.153 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -63 1969-12-31 16:00:13.752 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:14.899 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:15.827 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -64 1969-12-31 16:00:11.952 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:12.857 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, ctinyint, @@ -641,43 +640,43 @@ LIMIT 40 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL -61 1969-12-31 16:00:00.142 -61.0 NULL 61 -61 0 -61.0 
-0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:02.698 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:03.049 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:04.165 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:04.977 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -62 1969-12-31 16:00:00.037 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:01.22 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:01.515 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:01.734 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:02.373 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:03.85 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:08.198 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:09.025 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:09.889 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:10.069 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:10.225 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:10.485 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:12.388 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:12.591 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.154 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.247 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.517 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.965 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -63 1969-12-31 16:00:01.843 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:03.552 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:06.852 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 
0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:07.375 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:10.205 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:11.946 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:12.188 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:15.436 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -64 1969-12-31 16:00:00.199 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:00.29 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:01.785 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:03.944 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:05.997 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:10.858 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:11.912 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:12.339 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:13.274 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -61 1969-12-31 16:00:00.554 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:02.339 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:02.497 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:03.742 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:07.538 -61.0 821UdmGbkEf4j 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:09.809 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:10.713 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:12.045 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:12.75 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -62 1969-12-31 16:00:00.337 -62.0 1cGVWH7n1QU 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:00.659 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true 
-62 1969-12-31 16:00:00.684 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:01.419 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:02.123 -62.0 1cGVWH7n1QU 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:02.922 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:04.978 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:07.756 -62.0 1cGVWH7n1QU 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:07.847 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:07.903 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:13.677 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:14.872 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:15.153 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -63 1969-12-31 16:00:05.654 -63.0 821UdmGbkEf4j 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:07.623 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:09.14 -63.0 821UdmGbkEf4j 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:13.752 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:14.899 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:15.827 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -64 1969-12-31 15:59:58.959 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:00.013 -64.0 1cGVWH7n1QU 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:00.172 -64.0 1cGVWH7n1QU 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:00.631 -64.0 1cGVWH7n1QU 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:01.305 -64.0 cvLH6Eat2yFsyy7p 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:01.79 -64.0 1cGVWH7n1QU 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:02.496 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:03.088 
-64.0 cvLH6Eat2yFsyy7p 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:04.662 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:10.273 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:11.952 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:12.857 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 diff --git ql/src/test/results/clientpositive/spark/vectorization_14.q.out ql/src/test/results/clientpositive/spark/vectorization_14.q.out index 93c26cad5a..96e17a9c30 100644 --- ql/src/test/results/clientpositive/spark/vectorization_14.q.out +++ ql/src/test/results/clientpositive/spark/vectorization_14.q.out @@ -125,10 +125,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: float), _col2 (type: double), _col3 (type: timestamp), _col4 (type: boolean) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2, 3, 4] + keyColumns: 0:string, 1:float, 2:double, 3:timestamp, 4:boolean native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [5, 6, 7, 8, 9, 10, 11] + valueColumns: 5:double, 6:double, 7:bigint, 8:float, 9:double, 10:double, 11:bigint Statistics: Num rows: 606 Data size: 143411 Basic stats: COMPLETE Column stats: NONE value expressions: _col5 (type: double), _col6 (type: double), _col7 (type: bigint), _col8 (type: float), _col9 (type: double), _col10 (type: double), _col11 (type: bigint) Execution mode: vectorized @@ -191,10 +191,10 @@ STAGE PLANS: sort order: ++++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1, 2, 3] + keyColumns: 0:string, 1:float, 2:double, 3:timestamp native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [4, 12, 14, 13, 15, 8, 19, 20, 21, 22, 11, 24, 25, 23, 29, 28, 31, 34] + valueColumns: 4:boolean, 12:double, 14:double, 13:double, 15:float, 8:float, 19:float, 20:float, 21:double, 22:double, 11:bigint, 24:double, 25:double, 23:double, 29:double, 28:double, 31:double, 34:double Statistics: Num rows: 303 Data size: 71705 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: boolean), _col5 (type: double), _col6 (type: double), _col7 (type: double), _col8 (type: float), _col9 (type: float), _col10 (type: float), _col11 (type: float), _col12 (type: double), _col13 (type: double), _col14 (type: bigint), _col15 (type: double), _col16 (type: double), _col17 (type: double), _col18 (type: double), _col19 (type: double), _col20 (type: double), _col21 (type: double) Reducer 3 @@ -202,7 +202,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aaaa + reduceColumnNullOrder: zzzz 
reduceColumnSortOrder: ++++ allNative: false usesVectorUDFAdaptor: false diff --git ql/src/test/results/clientpositive/spark/vectorization_15.q.out ql/src/test/results/clientpositive/spark/vectorization_15.q.out index 66727764a4..1cea297dd0 100644 --- ql/src/test/results/clientpositive/spark/vectorization_15.q.out +++ ql/src/test/results/clientpositive/spark/vectorization_15.q.out @@ -121,10 +121,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: float), _col1 (type: boolean), _col2 (type: double), _col3 (type: string), _col4 (type: tinyint), _col5 (type: int), _col6 (type: timestamp) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2, 3, 4, 5, 6] + keyColumns: 0:float, 1:boolean, 2:double, 3:string, 4:tinyint, 5:int, 6:timestamp native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [7, 8, 9, 10, 11, 12, 13, 14, 15, 16] + valueColumns: 7:double, 8:double, 9:bigint, 10:double, 11:double, 12:double, 13:bigint, 14:double, 15:double, 16:bigint Statistics: Num rows: 12288 Data size: 2907994 Basic stats: COMPLETE Column stats: NONE value expressions: _col7 (type: double), _col8 (type: double), _col9 (type: bigint), _col10 (type: double), _col11 (type: double), _col12 (type: double), _col13 (type: bigint), _col14 (type: double), _col15 (type: double), _col16 (type: bigint) Execution mode: vectorized diff --git ql/src/test/results/clientpositive/spark/vectorization_16.q.out ql/src/test/results/clientpositive/spark/vectorization_16.q.out index 3ceb7516c7..adb0491640 100644 --- ql/src/test/results/clientpositive/spark/vectorization_16.q.out +++ ql/src/test/results/clientpositive/spark/vectorization_16.q.out @@ -98,10 +98,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: double), _col2 (type: timestamp) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:double, 2:timestamp native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3, 4, 5, 6] + valueColumns: 3:bigint, 4:double, 5:double, 6:double Statistics: Num rows: 4096 Data size: 969331 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint), _col4 (type: double), _col5 (type: double), _col6 (type: double) Execution mode: vectorized diff --git ql/src/test/results/clientpositive/spark/vectorization_17.q.out ql/src/test/results/clientpositive/spark/vectorization_17.q.out index 6df2bd1386..ba82b68e12 100644 --- ql/src/test/results/clientpositive/spark/vectorization_17.q.out +++ ql/src/test/results/clientpositive/spark/vectorization_17.q.out @@ -91,10 +91,10 @@ STAGE PLANS: sort order: ++ Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [3, 4] + keyColumns: 3:bigint, 4:float native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [6, 2, 8, 5, 
15, 16, 14, 17, 19, 20, 22, 18] + valueColumns: 6:string, 2:int, 8:timestamp, 5:double, 15:double, 16:bigint, 14:double, 17:double, 19:double, 20:double, 22:decimal(11,4), 18:double Statistics: Num rows: 4096 Data size: 969331 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string), _col2 (type: int), _col3 (type: timestamp), _col4 (type: double), _col6 (type: double), _col7 (type: bigint), _col8 (type: double), _col9 (type: double), _col10 (type: double), _col11 (type: double), _col12 (type: decimal(11,4)), _col13 (type: double) Execution mode: vectorized @@ -118,7 +118,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: zz reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false diff --git ql/src/test/results/clientpositive/spark/vectorization_2.q.out ql/src/test/results/clientpositive/spark/vectorization_2.q.out index b521c5b440..641e61ea2a 100644 --- ql/src/test/results/clientpositive/spark/vectorization_2.q.out +++ ql/src/test/results/clientpositive/spark/vectorization_2.q.out @@ -100,10 +100,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + valueColumns: 0:bigint, 1:bigint, 2:double, 3:double, 4:double, 5:bigint, 6:bigint, 7:tinyint, 8:double, 9:bigint Statistics: Num rows: 1 Data size: 76 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: double), _col3 (type: double), _col4 (type: double), _col5 (type: bigint), _col6 (type: bigint), _col7 (type: tinyint), _col8 (type: double), _col9 (type: bigint) Execution mode: vectorized diff --git ql/src/test/results/clientpositive/spark/vectorization_3.q.out ql/src/test/results/clientpositive/spark/vectorization_3.q.out index d329bb8139..1baaaf60ea 100644 --- ql/src/test/results/clientpositive/spark/vectorization_3.q.out +++ ql/src/test/results/clientpositive/spark/vectorization_3.q.out @@ -105,10 +105,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13] + valueColumns: 0:double, 1:double, 2:bigint, 3:double, 4:double, 5:bigint, 6:double, 7:double, 8:bigint, 9:double, 10:bigint, 11:bigint, 12:double, 13:double Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: double), _col1 (type: double), _col2 (type: bigint), _col3 (type: double), _col4 (type: double), _col5 (type: bigint), _col6 (type: double), _col7 (type: double), _col8 (type: bigint), _col9 (type: double), _col10 (type: bigint), _col11 (type: bigint), _col12 (type: double), _col13 (type: double) Execution mode: vectorized diff --git 
ql/src/test/results/clientpositive/spark/vectorization_4.q.out ql/src/test/results/clientpositive/spark/vectorization_4.q.out index 28d07ddd44..c67b97f35e 100644 --- ql/src/test/results/clientpositive/spark/vectorization_4.q.out +++ ql/src/test/results/clientpositive/spark/vectorization_4.q.out @@ -100,10 +100,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3, 4] + valueColumns: 0:bigint, 1:double, 2:double, 3:bigint, 4:tinyint Statistics: Num rows: 1 Data size: 36 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint), _col1 (type: double), _col2 (type: double), _col3 (type: bigint), _col4 (type: tinyint) Execution mode: vectorized diff --git ql/src/test/results/clientpositive/spark/vectorization_5.q.out ql/src/test/results/clientpositive/spark/vectorization_5.q.out index a5e4ade5e1..da9a4d3303 100644 --- ql/src/test/results/clientpositive/spark/vectorization_5.q.out +++ ql/src/test/results/clientpositive/spark/vectorization_5.q.out @@ -93,10 +93,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1, 2, 3, 4] + valueColumns: 0:smallint, 1:bigint, 2:smallint, 3:bigint, 4:tinyint Statistics: Num rows: 1 Data size: 28 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: smallint), _col1 (type: bigint), _col2 (type: smallint), _col3 (type: bigint), _col4 (type: tinyint) Execution mode: vectorized diff --git ql/src/test/results/clientpositive/spark/vectorization_9.q.out ql/src/test/results/clientpositive/spark/vectorization_9.q.out index 3ceb7516c7..adb0491640 100644 --- ql/src/test/results/clientpositive/spark/vectorization_9.q.out +++ ql/src/test/results/clientpositive/spark/vectorization_9.q.out @@ -98,10 +98,10 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string), _col1 (type: double), _col2 (type: timestamp) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [0, 1, 2] + keyColumns: 0:string, 1:double, 2:timestamp native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [3, 4, 5, 6] + valueColumns: 3:bigint, 4:double, 5:double, 6:double Statistics: Num rows: 4096 Data size: 969331 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: bigint), _col4 (type: double), _col5 (type: double), _col6 (type: double) Execution mode: vectorized diff --git ql/src/test/results/clientpositive/spark/vectorization_div0.q.out ql/src/test/results/clientpositive/spark/vectorization_div0.q.out index 0dfeb1472d..cec37eb442 100644 --- ql/src/test/results/clientpositive/spark/vectorization_div0.q.out +++ 
ql/src/test/results/clientpositive/spark/vectorization_div0.q.out @@ -109,106 +109,106 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### cint cint_div ctinyint ctinyint_div cbigint cbigint_div cdouble cdouble_div -NULL NULL -60 NULL -1016256928 NULL 15601.0 NULL -NULL NULL -60 NULL -1062217466 NULL -200.0 NULL -NULL NULL -60 NULL -1183915345 NULL -7196.0 NULL -NULL NULL -60 NULL -126921733 NULL -200.0 NULL -NULL NULL -60 NULL -1445021496 NULL -200.0 NULL -NULL NULL -60 NULL -1690528981 NULL -200.0 NULL -NULL NULL -60 NULL -1743144280 NULL 15601.0 NULL -NULL NULL -60 NULL -1802243330 NULL -7196.0 NULL -NULL NULL -60 NULL -1860186661 NULL -200.0 NULL -NULL NULL -60 NULL -2041965187 NULL 15601.0 NULL -NULL NULL -60 NULL -483910982 NULL -200.0 NULL -NULL NULL -60 NULL -508015343 NULL -200.0 NULL -NULL NULL -60 NULL -519753851 NULL 15601.0 NULL -NULL NULL -60 NULL -5953872 NULL 15601.0 NULL -NULL NULL -60 NULL -68838726 NULL -7196.0 NULL -NULL NULL -60 NULL -903925845 NULL 15601.0 NULL -NULL NULL -60 NULL 1122241452 NULL 15601.0 NULL -NULL NULL -60 NULL 1172431520 NULL -200.0 NULL -NULL NULL -60 NULL 927847540 NULL -200.0 NULL -NULL NULL -60 NULL NULL NULL -200.0 NULL -NULL NULL -61 NULL -1022679553 NULL 15601.0 NULL -NULL NULL -61 NULL -1062521098 NULL -7196.0 NULL -NULL NULL -61 NULL -1313743110 NULL -200.0 NULL -NULL NULL -61 NULL -1513172815 NULL -7196.0 NULL -NULL NULL -61 NULL -1728754595 NULL -7196.0 NULL -NULL NULL -61 NULL -1769786673 NULL -200.0 NULL -NULL NULL -61 NULL -2114172148 NULL -7196.0 NULL -NULL NULL -61 NULL -2175533 NULL -7196.0 NULL -NULL NULL -61 NULL -836697023 NULL -200.0 NULL -NULL NULL -61 NULL -854893578 NULL 15601.0 NULL -NULL NULL -61 NULL -982179838 NULL 15601.0 NULL -NULL NULL -61 NULL 1114673625 NULL 15601.0 NULL -NULL NULL -61 NULL 1139675920 NULL 15601.0 NULL -NULL NULL -61 NULL 1237548317 NULL -7196.0 NULL -NULL NULL -61 NULL 127734700 NULL -7196.0 NULL -NULL NULL -61 NULL 1399483216 NULL -200.0 NULL -NULL NULL -61 NULL 1415466231 NULL -7196.0 NULL -NULL NULL -61 NULL 184425274 NULL -200.0 NULL -NULL NULL -61 NULL 1977536065 NULL 15601.0 NULL -NULL NULL -61 NULL 484546535 NULL 15601.0 NULL -NULL NULL -61 NULL 623787602 NULL -200.0 NULL -NULL NULL -61 NULL 919939154 NULL 15601.0 NULL -NULL NULL -61 NULL 943547371 NULL -7196.0 NULL -NULL NULL -61 NULL NULL NULL -7196.0 NULL -NULL NULL -61 NULL NULL NULL -7196.0 NULL -NULL NULL -62 NULL -1113073921 NULL -200.0 NULL -NULL NULL -62 NULL -1367753794 NULL -7196.0 NULL -NULL NULL -62 NULL -1592016120 NULL 15601.0 NULL -NULL NULL -62 NULL -167812632 NULL -200.0 NULL -NULL NULL -62 NULL -1726415169 NULL 15601.0 NULL -NULL NULL -62 NULL -1761785534 NULL -7196.0 NULL -NULL NULL -62 NULL -2080605724 NULL -200.0 NULL -NULL NULL -62 NULL -642836823 NULL -7196.0 NULL -NULL NULL -62 NULL -840223244 NULL -7196.0 NULL -NULL NULL -62 NULL 1221804187 NULL -200.0 NULL -NULL NULL -62 NULL 1380844570 NULL -7196.0 NULL -NULL NULL -62 NULL 1443417260 NULL -200.0 NULL -NULL NULL -62 NULL 1607712873 NULL -200.0 NULL -NULL NULL -62 NULL 1670449519 NULL -7196.0 NULL -NULL NULL -62 NULL 2071666427 NULL -200.0 NULL -NULL NULL -62 NULL 281485844 NULL 15601.0 NULL -NULL NULL -62 NULL 325025905 NULL -200.0 NULL -NULL NULL -62 NULL 667693308 NULL 15601.0 NULL -NULL NULL -62 NULL 68899019 NULL 15601.0 NULL -NULL NULL -62 NULL 726070601 NULL -200.0 NULL -NULL NULL -62 NULL 73960976 NULL 15601.0 NULL -NULL NULL -62 NULL 756424745 NULL -7196.0 NULL -NULL NULL -62 NULL 986221936 NULL -7196.0 NULL 
-NULL NULL -62 NULL NULL NULL -7196.0 NULL -NULL NULL -62 NULL NULL NULL -7196.0 NULL -NULL NULL -63 NULL -1167054574 NULL 15601.0 NULL -NULL NULL -63 NULL -1224023895 NULL -7196.0 NULL -NULL NULL -63 NULL -1574729892 NULL 15601.0 NULL -NULL NULL -63 NULL -1711796768 NULL -7196.0 NULL -NULL NULL -63 NULL -1996001975 NULL 15601.0 NULL -NULL NULL -63 NULL -1999307539 NULL -200.0 NULL -NULL NULL -63 NULL -200542601 NULL 15601.0 NULL -NULL NULL -63 NULL -2070832461 NULL -200.0 NULL -NULL NULL -63 NULL -721244708 NULL 15601.0 NULL -NULL NULL -63 NULL -994504916 NULL -7196.0 NULL -NULL NULL -63 NULL -997946077 NULL -200.0 NULL -NULL NULL -63 NULL 1089367203 NULL -200.0 NULL -NULL NULL -63 NULL 1927856372 NULL -200.0 NULL -NULL NULL -63 NULL 2059199534 NULL 15601.0 NULL -NULL NULL -63 NULL 483904240 NULL 15601.0 NULL -NULL NULL -63 NULL 507317726 NULL -200.0 NULL -NULL NULL -63 NULL 956380949 NULL -200.0 NULL -NULL NULL -64 NULL -1615920595 NULL -7196.0 NULL -NULL NULL -64 NULL -1639157869 NULL -7196.0 NULL -NULL NULL -64 NULL -1809291815 NULL 15601.0 NULL -NULL NULL -64 NULL -1809444706 NULL -200.0 NULL -NULL NULL -64 NULL -527203677 NULL -7196.0 NULL -NULL NULL -64 NULL 1090418478 NULL -7196.0 NULL -NULL NULL -64 NULL 1421812187 NULL 15601.0 NULL -NULL NULL -64 NULL 1805860756 NULL -7196.0 NULL -NULL NULL -64 NULL 1960950366 NULL 15601.0 NULL -NULL NULL -64 NULL 2118653994 NULL -200.0 NULL -NULL NULL -64 NULL 406535485 NULL -7196.0 NULL -NULL NULL -64 NULL 658026952 NULL -7196.0 NULL -NULL NULL -64 NULL 927647669 NULL -200.0 NULL +-1039715238 NULL -51 NULL -86361999 NULL NULL NULL +-1039762548 NULL NULL NULL -1645852809 NULL -3802.0 NULL +-1039776293 NULL NULL NULL -1645852809 NULL 13704.0 NULL +-1041252354 NULL NULL NULL -1887561756 NULL 756.0 NULL +-1041353707 NULL 11 NULL -931949639 NULL NULL NULL +-1041391389 NULL NULL NULL 1864027286 NULL -12970.0 NULL +-1041734429 NULL NULL NULL -1645852809 NULL -836.0 NULL +-1042396242 NULL NULL NULL -1887561756 NULL 9583.0 NULL +-1042712895 NULL NULL NULL -1887561756 NULL 9296.0 NULL +-1042805968 NULL NULL NULL -1887561756 NULL 5133.0 NULL +-1043082182 NULL NULL NULL -1887561756 NULL 9180.0 NULL +-1043132597 NULL NULL NULL -1887561756 NULL 12302.0 NULL +-1043573508 NULL NULL NULL 1864027286 NULL 16216.0 NULL +-1043979188 NULL 11 NULL -8894336 NULL NULL NULL +-1044093617 NULL NULL NULL -1887561756 NULL -3422.0 NULL +-1044207190 NULL NULL NULL -1645852809 NULL 5381.0 NULL +-1044357977 NULL 11 NULL -1392575676 NULL NULL NULL +-1044748460 NULL -51 NULL 538703088 NULL NULL NULL +-1044828205 NULL -51 NULL -1627128549 NULL NULL NULL +-1045087657 NULL NULL NULL -1645852809 NULL -5865.0 NULL +-1045181724 NULL NULL NULL -1887561756 NULL -5706.0 NULL +-1045196363 NULL NULL NULL -1887561756 NULL -5039.0 NULL +-1045737053 NULL 8 NULL -1286738860 NULL NULL NULL +-1045867222 NULL NULL NULL -1887561756 NULL -8034.0 NULL +-1046399794 NULL NULL NULL -1887561756 NULL 4130.0 NULL +-1046766350 NULL 8 NULL -1069616395 NULL NULL NULL +-1046913669 NULL 8 NULL -90393132 NULL NULL NULL +-1047036113 NULL 11 NULL -240113848 NULL NULL NULL +-1047782718 NULL 11 NULL -1527855515 NULL NULL NULL +-1048097158 NULL 11 NULL -234579722 NULL NULL NULL +-1048696030 NULL 11 NULL -1554184139 NULL NULL NULL +-1048934049 NULL NULL NULL -1887561756 NULL -524.0 NULL +-1049984461 NULL 8 NULL -247067895 NULL NULL NULL +-1050165799 NULL NULL NULL 1864027286 NULL 8634.0 NULL +-1050388484 NULL 8 NULL 987404155 NULL NULL NULL +-1050657303 NULL NULL NULL -1645852809 NULL -6999.0 NULL +-1050684541 NULL 
NULL NULL -1887561756 NULL -8261.0 NULL +-1051223597 NULL 11 NULL -1074802968 NULL NULL NULL +-1052322972 NULL NULL NULL -1645852809 NULL -7433.0 NULL +-1052668265 NULL 8 NULL 1712280188 NULL NULL NULL +-1052745800 NULL NULL NULL -1645852809 NULL -12404.0 NULL +-1053238077 NULL NULL NULL -1645852809 NULL -3704.0 NULL +-1053254526 NULL 11 NULL 1704531790 NULL NULL NULL +-1053385587 NULL NULL NULL -1645852809 NULL 14504.0 NULL +-1054849160 NULL 11 NULL -1027630923 NULL NULL NULL +-1054958082 NULL 8 NULL 762300991 NULL NULL NULL +-1055040773 NULL -51 NULL 1331071870 NULL NULL NULL +-1055076545 NULL 11 NULL 542002983 NULL NULL NULL +-1055185482 NULL 11 NULL -398806473 NULL NULL NULL +-1055316250 NULL NULL NULL -1887561756 NULL -14990.0 NULL +-1055669248 NULL NULL NULL 1864027286 NULL 2570.0 NULL +-1055945837 NULL NULL NULL -1645852809 NULL 13690.0 NULL +-1056684111 NULL NULL NULL 1864027286 NULL 13991.0 NULL +-1058286942 NULL 8 NULL -922041114 NULL NULL NULL +-1058844180 NULL -51 NULL 822773337 NULL NULL NULL +-1058897881 NULL 8 NULL -800997317 NULL NULL NULL +-1059047258 NULL NULL NULL 1864027286 NULL 12452.0 NULL +-1059338191 NULL NULL NULL 1864027286 NULL 7322.0 NULL +-1059487309 NULL 8 NULL 1632546080 NULL NULL NULL +-1059941909 NULL NULL NULL -1887561756 NULL 8782.0 NULL +-1060624784 NULL -51 NULL -941434751 NULL NULL NULL +-1060670281 NULL 11 NULL -1705503157 NULL NULL NULL +-1060990068 NULL 11 NULL 960036652 NULL NULL NULL +-1061057428 NULL NULL NULL -1887561756 NULL -1085.0 NULL +-1061509617 NULL 8 NULL 453428995 NULL NULL NULL +-1061614989 NULL NULL NULL 1864027286 NULL -4234.0 NULL +-1062973443 NULL NULL NULL -1645852809 NULL 10541.0 NULL +-1063164541 NULL 8 NULL -74907656 NULL NULL NULL +-1063498122 NULL NULL NULL 1864027286 NULL -11480.0 NULL +-1063745167 NULL 8 NULL -68741114 NULL NULL NULL +-1064623720 NULL 11 NULL -1894858490 NULL NULL NULL +-1064718136 NULL -51 NULL 156403402 NULL NULL NULL +-1064949302 NULL NULL NULL -1645852809 NULL 6454.0 NULL +-1064981602 NULL -51 NULL -1444011153 NULL NULL NULL +-1065117869 NULL NULL NULL -1887561756 NULL 2538.0 NULL +-1065775394 NULL -51 NULL -1331703092 NULL NULL NULL +-1066226047 NULL NULL NULL 1864027286 NULL -9439.0 NULL +-1066684273 NULL -51 NULL 2034191923 NULL NULL NULL +-1066922682 NULL NULL NULL -1645852809 NULL -9987.0 NULL +-1067386090 NULL NULL NULL -1887561756 NULL -3977.0 NULL +-1067683781 NULL -51 NULL 1750003656 NULL NULL NULL +-1067874703 NULL 11 NULL -1742615956 NULL NULL NULL +-1068206466 NULL 8 NULL 1240583144 NULL NULL NULL +-1068247011 NULL 8 NULL -729456614 NULL NULL NULL +-1068336533 NULL 11 NULL 925708299 NULL NULL NULL +-1068623584 NULL NULL NULL -1887561756 NULL -14005.0 NULL +-1069097390 NULL 11 NULL -1858556598 NULL NULL NULL +-1069103950 NULL 11 NULL -927759444 NULL NULL NULL +-1069109166 NULL NULL NULL -1645852809 NULL 8390.0 NULL +-1069512165 NULL NULL NULL -1645852809 NULL 11417.0 NULL +-1069736047 NULL 11 NULL -453772520 NULL NULL NULL +-1070551679 NULL NULL NULL 1864027286 NULL -947.0 NULL +-1070883071 NULL NULL NULL -1645852809 NULL -741.0 NULL +-1071363017 NULL 8 NULL 1349676361 NULL NULL NULL +-1071480828 NULL -51 NULL -1401575336 NULL NULL NULL +-1072076362 NULL NULL NULL 1864027286 NULL -5470.0 NULL +-1072081801 NULL NULL NULL 1864027286 NULL 8373.0 NULL +-1072910839 NULL 11 NULL 2048385991 NULL NULL NULL +-1073051226 NULL NULL NULL -1887561756 NULL -7382.0 NULL +-1073279343 NULL 11 NULL -1595604468 NULL NULL NULL PREHOOK: query: explain vectorization expression select (cbigint - 988888L) as s1, 
cdouble / (cbigint - 988888L) as s2, 1.2 / (cbigint - 988888L) as s3 from alltypesorc where cbigint > 0 and cbigint < 100000000 order by s1, s2, s3 limit 100 @@ -833,33 +833,33 @@ cint cbigint ctinyint c1 c2 c3 c4 c5 c6 518304665 1758550605 11 -50.66466248332617 2.3752809176800223 1.0 6799565 277841025 0 519195191 301311742 8 -55.590873825535546 -0.42030748533591705 1.0 5518511 301311742 0 519627078 -1887561756 NULL -58.334667723581276 0.6495936807799166 NULL 2981116 -1887561756 NULL -NULL -1111841132 0 NULL 0.5219820874778469 NULL NULL -1111841132 NULL -NULL -1300968933 0 NULL 0.5609644308891505 NULL NULL -1300968933 NULL -NULL -1355080830 0 NULL 0.5709746619109379 NULL NULL -1355080830 NULL -NULL -1379420228 0 NULL 0.5753299124049946 NULL NULL -1379420228 NULL -NULL -1418871864 0 NULL 0.5822045387685764 NULL NULL -1418871864 NULL -NULL -203039588 0 NULL 0.1662575351985599 NULL NULL -203039588 NULL -NULL -229832118 0 NULL 0.18415622913786178 NULL NULL -229832118 NULL -NULL -277546656 0 NULL 0.21419893397937406 NULL NULL -277546656 NULL -NULL -39854776 0 NULL 0.03766811940658894 NULL NULL -39854776 NULL -NULL -438779645 0 NULL 0.3011578829200047 NULL NULL -438779645 NULL -NULL -495480552 0 NULL 0.32733585778445334 NULL NULL -495480552 NULL -NULL -741129356 0 NULL 0.42125774599060745 NULL NULL -741129356 NULL -NULL -901264012 0 NULL 0.46954044013967267 NULL NULL -901264012 NULL -NULL 1018195815 0 NULL NULL NULL NULL NULL NULL -NULL 1049949527 0 NULL 33.065410651831826 NULL NULL 2077031 NULL -NULL 10989626 0 NULL -0.010910999277030852 NULL NULL 10989626 NULL -NULL 1561097160 0 NULL 2.87547115949768 NULL NULL 475294470 NULL -NULL 1580847931 0 NULL 2.8096365161452623 NULL NULL 455543699 NULL -NULL 1585496199 0 NULL 2.794808964909849 NULL NULL 450895431 NULL -NULL 1638241933 0 NULL 2.6421291665920887 NULL NULL 398149697 NULL -NULL 1738765387 0 NULL 2.413043035072816 NULL NULL 297626243 NULL -NULL 1907356119 0 NULL 2.145120638449015 NULL NULL 129035511 NULL -NULL 2136716416 0 NULL 1.9103058218951838 NULL NULL 1018195815 NULL -NULL 2144209609 0 NULL 1.904248083305452 NULL NULL 1018195815 NULL -NULL 406548885 0 NULL -0.6646790248746937 NULL NULL 406548885 NULL -NULL 473839931 0 NULL -0.8704598313848666 NULL NULL 473839931 NULL -NULL 53950949 0 NULL -0.05595150246825374 NULL NULL 53950949 NULL -NULL 618557893 0 NULL -1.5477957895096852 NULL NULL 218919971 NULL -NULL 738226024 0 NULL -2.636805997401341 NULL NULL 178286442 NULL -NULL 98841361 0 NULL -0.10751170081349277 NULL NULL 98841361 NULL +520081159 -1827280551 8 -61.52179743844285 0.6421703489910483 1.0 4411071 -1827280551 0 +520374125 59296415 8 -63.76632193888667 -0.0618379936414602 1.0 6253679 59296415 0 +520630560 275901824 -51 -65.86752598964071 -0.3716880741932343 1.0 6857105 275901824 0 +520879263 -1480800353 11 -68.03983944100872 0.5925580727020947 1.0 304991 -1480800353 0 +521019755 -1909738698 11 -69.33052868045986 0.6522477499140705 1.0 2483927 -1909738698 0 +521080737 -1918433146 8 -69.90590821340939 0.65327733652355 1.0 6752667 -1918433146 0 +521249276 -1887561756 NULL -71.54621095544556 0.6495936807799166 NULL 3979415 -1887561756 NULL +521256931 1864027286 NULL -71.62251677559098 2.2037809539011586 NULL 4530575 172364344 NULL +521315946 -986052008 11 -72.21621730196662 0.49198107972698546 1.0 1560834 -986052008 0 +521389499 -112901465 -51 -72.96990105899457 0.099815875253453 1.0 6930203 -112901465 0 +521504167 -1645852809 NULL -74.17633871931272 0.6178013397250965 NULL 1239767 -1645852809 NULL +522187830 -1887561756 NULL 
-82.27398980011934 0.6495936807799166 NULL 1738996 -1887561756 NULL +522957489 -1645852809 NULL -93.76572030298651 0.6178013397250965 NULL 4270635 -1645852809 NULL +523172866 -1928034601 11 -97.57227259511133 0.654407269210678 1.0 3068469 -1928034601 0 +523369608 634246195 -51 -101.32691133031916 -1.651899525255423 1.0 1688549 250296575 0 +523396209 -1887561756 NULL -101.85663156862294 0.6495936807799166 NULL 4401851 -1887561756 NULL +524224864 -801085374 -51 -121.63263627974922 0.44033070799810264 1.0 2726601 -801085374 0 +524852698 -942817737 11 -142.54287412864886 0.48078083705155344 1.0 1998900 -942817737 0 +525437671 752506166 11 -169.6549512833958 -2.832275057881536 1.0 2028447 221126868 0 +525640312 -1887561756 NULL -181.60251653592817 0.6495936807799166 NULL 1743957 -1887561756 NULL +525718152 -1624826596 8 -186.6489214890924 0.6147608091545615 1.0 1827762 -1624826596 0 +525955379 -1645852809 NULL -203.90704267834076 0.6178013397250965 NULL 2339615 -1645852809 NULL +526337887 1864027286 NULL -239.5842681439132 2.2037809539011586 NULL 1283567 172364344 NULL +527127072 1864027286 NULL -374.4611382437247 2.2037809539011586 NULL 649142 172364344 NULL +527187434 -1645852809 NULL -391.2822101143518 0.6178013397250965 NULL 380231 -1645852809 NULL +527554807 1864027286 NULL -538.3432048246867 2.2037809539011586 NULL 336327 172364344 NULL +528023644 1864027286 NULL -1033.0657082541775 2.2037809539011586 NULL 33585 172364344 NULL +528393062 -1131246885 -51 -3728.824402808652 0.5262977631364633 1.0 116822 -1131246885 0 +528534767 NULL -64 NULL NULL 1.0 NULL NULL 0 +528534767 NULL -64 NULL NULL 1.0 NULL NULL 0 diff --git ql/src/test/results/clientpositive/spark/vectorization_nested_udf.q.out ql/src/test/results/clientpositive/spark/vectorization_nested_udf.q.out index 2871c1a1c6..40e13bb23b 100644 --- ql/src/test/results/clientpositive/spark/vectorization_nested_udf.q.out +++ ql/src/test/results/clientpositive/spark/vectorization_nested_udf.q.out @@ -52,10 +52,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0] + valueColumns: 0:bigint Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint) Execution mode: vectorized diff --git ql/src/test/results/clientpositive/spark/vectorization_part_project.q.out ql/src/test/results/clientpositive/spark/vectorization_part_project.q.out index 48165bbf94..d694a5f3f4 100644 --- ql/src/test/results/clientpositive/spark/vectorization_part_project.q.out +++ ql/src/test/results/clientpositive/spark/vectorization_part_project.q.out @@ -69,15 +69,15 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_part - Statistics: Num rows: 200 Data size: 54496 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 200 Data size: 40674 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: (cdouble + 2.0D) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 200 Data size: 54496 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 200 Data size: 40674 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + - Statistics: Num rows: 200 Data size: 
54496 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 200 Data size: 40674 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 Execution mode: vectorized Map Vectorization: @@ -101,13 +101,13 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: double) outputColumnNames: _col0 - Statistics: Num rows: 200 Data size: 54496 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 200 Data size: 40674 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 10 - Statistics: Num rows: 10 Data size: 2720 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 2030 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 2720 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 2030 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -131,13 +131,13 @@ POSTHOOK: Input: default@alltypesorc_part POSTHOOK: Input: default@alltypesorc_part@ds=2011 POSTHOOK: Input: default@alltypesorc_part@ds=2012 #### A masked pattern was here #### -NULL -NULL --15863.0 --15863.0 --14988.0 --14988.0 --14646.0 --14646.0 --14236.0 --14236.0 +-15990.0 +-15990.0 +-15918.0 +-15918.0 +-15890.0 +-15890.0 +-14305.0 +-14305.0 +-12514.0 +-12514.0 diff --git ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out index 1b04155772..3844c79e1a 100644 --- ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out +++ ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out @@ -1232,56 +1232,56 @@ LIMIT 50 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -51 -51.0 1969-12-31 15:59:43.64 -7196 -1339164819 4992406445232 NULL NULL 7196 -14392 -7196 NULL NULL 51.0 6.4051596E8 -5.157308006568995E-5 51 -1.5598627 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -52 -52.0 1969-12-31 15:59:45.978 -7196 -2128720310 7935869315680 NULL NULL 7196 -14392 -7196 NULL NULL 52.0 6.4051596E8 -5.258431692972308E-5 52 -1.5298654 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -52 -52.0 1969-12-31 15:59:47.15 -7196 628698169 -2343786774032 NULL NULL 7196 -14392 -7196 NULL NULL 52.0 6.4051596E8 -5.258431692972308E-5 52 -1.5298654 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -52 -52.0 1969-12-31 15:59:57.86 -7196 -26309289 98081029392 NULL NULL 7196 -14392 -7196 NULL NULL 52.0 6.4051596E8 -5.258431692972308E-5 52 -1.5298654 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -52 -52.0 1969-12-31 15:59:58.479 -7196 -1379694191 5143499944048 NULL NULL 7196 -14392 -7196 NULL NULL 52.0 6.4051596E8 -5.258431692972308E-5 52 -1.5298654 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -52 -52.0 1969-12-31 16:00:03.963 -7196 95444104 -355815619712 NULL NULL 7196 -14392 -7196 NULL NULL 52.0 6.4051596E8 -5.258431692972308E-5 52 -1.5298654 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -52 -52.0 1969-12-31 16:00:04.518 -7196 -1658319459 6182214943152 NULL NULL 7196 -14392 -7196 NULL NULL 52.0 6.4051596E8 -5.258431692972308E-5 52 -1.5298654 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -53 -53.0 1969-12-31 15:59:48.882 -7196 -1560660031 5818140595568 NULL NULL 7196 -14392 -7196 NULL NULL 53.0 6.4051596E8 -5.359555379375622E-5 53 
-1.5010000 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -53 -53.0 1969-12-31 15:59:57.663 -7196 898472381 -3349505036368 NULL NULL 7196 -14392 -7196 NULL NULL 53.0 6.4051596E8 -5.359555379375622E-5 53 -1.5010000 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -53 -53.0 1969-12-31 16:00:11.36 -7196 -1357789899 5061840743472 NULL NULL 7196 -14392 -7196 NULL NULL 53.0 6.4051596E8 -5.359555379375622E-5 53 -1.5010000 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -54 -54.0 1969-12-31 15:59:53.657 -7196 1476582815 -5504700734320 NULL NULL 7196 -14392 -7196 NULL NULL 54.0 6.4051596E8 -5.4606790657789354E-5 54 -1.4732037 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -54 -54.0 1969-12-31 16:00:05.688 -7196 1614836149 -6020109163472 NULL NULL 7196 -14392 -7196 NULL NULL 54.0 6.4051596E8 -5.4606790657789354E-5 54 -1.4732037 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -54 -54.0 1969-12-31 16:00:06.484 -7196 1605976008 -5987078557824 NULL NULL 7196 -14392 -7196 NULL NULL 54.0 6.4051596E8 -5.4606790657789354E-5 54 -1.4732037 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -54 -54.0 1969-12-31 16:00:11.198 -7196 1650677402 -6153725354656 NULL NULL 7196 -14392 -7196 NULL NULL 54.0 6.4051596E8 -5.4606790657789354E-5 54 -1.4732037 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -55 -55.0 1969-12-31 15:59:43.932 -7196 1982381637 -7390318742736 NULL NULL 7196 -14392 -7196 NULL NULL 55.0 6.4051596E8 -5.561802752182249E-5 55 -1.4464182 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -55 -55.0 1969-12-31 16:00:01.138 -7196 888532643 -3312449693104 NULL NULL 7196 -14392 -7196 NULL NULL 55.0 6.4051596E8 -5.561802752182249E-5 55 -1.4464182 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -55 -55.0 1969-12-31 16:00:13.249 -7196 -685064281 2553919639568 NULL NULL 7196 -14392 -7196 NULL NULL 55.0 6.4051596E8 -5.561802752182249E-5 55 -1.4464182 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -56 -56.0 1969-12-31 16:00:02.298 -7196 -1509994296 5629258735488 NULL NULL 7196 -14392 -7196 NULL NULL 56.0 6.4051596E8 -5.6629264385855625E-5 56 -1.4205893 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -57 -57.0 1969-12-31 15:59:44.539 -7196 1839592407 -6858000493296 NULL NULL 7196 -14392 -7196 NULL NULL 57.0 6.4051596E8 -5.764050124988876E-5 57 -1.3956667 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -57 -57.0 1969-12-31 16:00:04.659 -7196 -1579093262 5886859680736 NULL NULL 7196 -14392 -7196 NULL NULL 57.0 6.4051596E8 -5.764050124988876E-5 57 -1.3956667 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -57 -57.0 1969-12-31 16:00:05.5 -7196 2042351711 -7613887178608 NULL NULL 7196 -14392 -7196 NULL NULL 57.0 6.4051596E8 -5.764050124988876E-5 57 -1.3956667 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -57 -57.0 1969-12-31 16:00:12.626 -7196 248308622 -925694542816 NULL NULL 7196 -14392 -7196 NULL NULL 57.0 6.4051596E8 -5.764050124988876E-5 57 -1.3956667 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -58 -58.0 1969-12-31 15:59:47.859 -7196 -1770443874 6600214762272 NULL NULL 7196 -14392 -7196 NULL NULL 58.0 6.4051596E8 -5.86517381139219E-5 58 -1.3716034 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -58 -58.0 1969-12-31 15:59:55.857 -7196 -825174557 3076250748496 NULL NULL 7196 -14392 -7196 NULL NULL 58.0 6.4051596E8 -5.86517381139219E-5 58 -1.3716034 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -58 -58.0 1969-12-31 16:00:12.065 -7196 1257970504 -4689714038912 NULL NULL 7196 -14392 -7196 NULL NULL 58.0 6.4051596E8 -5.86517381139219E-5 58 -1.3716034 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false 
-59 -59.0 1969-12-31 16:00:13.15 -7196 -1604890000 5983029920000 NULL NULL 7196 -14392 -7196 NULL NULL 59.0 6.4051596E8 -5.966297497795504E-5 59 -1.3483559 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -60 -60.0 1969-12-31 15:59:45.385 -7196 1775867066 -6620432422048 NULL NULL 7196 -14392 -7196 NULL NULL 60.0 6.4051596E8 -6.0674211841988174E-5 60 -1.3258833 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -60 -60.0 1969-12-31 15:59:52.408 -7196 1516314750 -5652821388000 NULL NULL 7196 -14392 -7196 NULL NULL 60.0 6.4051596E8 -6.0674211841988174E-5 60 -1.3258833 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -60 -60.0 1969-12-31 15:59:55.806 -7196 -1802243330 6718763134240 NULL NULL 7196 -14392 -7196 NULL NULL 60.0 6.4051596E8 -6.0674211841988174E-5 60 -1.3258833 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -60 -60.0 1969-12-31 16:00:10.618 -7196 -68838726 256630770528 NULL NULL 7196 -14392 -7196 NULL NULL 60.0 6.4051596E8 -6.0674211841988174E-5 60 -1.3258833 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -61 -61.0 1969-12-31 15:59:44.823 -7196 NULL NULL NULL NULL 7196 -14392 -7196 NULL NULL 61.0 6.4051596E8 -6.16854487060213E-5 61 -1.3041475 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -61 -61.0 1969-12-31 15:59:48.035 -7196 1237548317 -4613580125776 NULL NULL 7196 -14392 -7196 NULL NULL 61.0 6.4051596E8 -6.16854487060213E-5 61 -1.3041475 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -61 -61.0 1969-12-31 16:00:03.049 -7196 -1513172815 5641108254320 NULL NULL 7196 -14392 -7196 NULL NULL 61.0 6.4051596E8 -6.16854487060213E-5 61 -1.3041475 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -61 -61.0 1969-12-31 16:00:06.848 -7196 1415466231 -5276858109168 NULL NULL 7196 -14392 -7196 NULL NULL 61.0 6.4051596E8 -6.16854487060213E-5 61 -1.3041475 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -61 -61.0 1969-12-31 16:00:11.842 -7196 NULL NULL NULL NULL 7196 -14392 -7196 NULL NULL 61.0 6.4051596E8 -6.16854487060213E-5 61 -1.3041475 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -61 -61.0 1969-12-31 16:00:12.454 -7196 -2175533 8110387024 NULL NULL 7196 -14392 -7196 NULL NULL 61.0 6.4051596E8 -6.16854487060213E-5 61 -1.3041475 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -61 -61.0 1969-12-31 16:00:14.192 -7196 -2114172148 7881633767744 NULL NULL 7196 -14392 -7196 NULL NULL 61.0 6.4051596E8 -6.16854487060213E-5 61 -1.3041475 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -62 -62.0 1969-12-31 15:59:58.395 -7196 -1367753794 5098986144032 NULL NULL 7196 -14392 -7196 NULL NULL 62.0 6.4051596E8 -6.269668557005445E-5 62 -1.2831129 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -62 -62.0 1969-12-31 16:00:01.22 -7196 1670449519 -6227435806832 NULL NULL 7196 -14392 -7196 NULL NULL 62.0 6.4051596E8 -6.269668557005445E-5 62 -1.2831129 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -62 -62.0 1969-12-31 16:00:02.373 -7196 NULL NULL NULL NULL 7196 -14392 -7196 NULL NULL 62.0 6.4051596E8 -6.269668557005445E-5 62 -1.2831129 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -62 -62.0 1969-12-31 16:00:03.85 -7196 -642836823 2396495676144 NULL NULL 7196 -14392 -7196 NULL NULL 62.0 6.4051596E8 -6.269668557005445E-5 62 -1.2831129 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -62 -62.0 1969-12-31 16:00:09.025 -7196 -840223244 3132352253632 NULL NULL 7196 -14392 -7196 NULL NULL 62.0 6.4051596E8 -6.269668557005445E-5 62 -1.2831129 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -62 -62.0 1969-12-31 16:00:12.388 -7196 NULL NULL NULL NULL 7196 -14392 -7196 NULL NULL 62.0 6.4051596E8 
-6.269668557005445E-5 62 -1.2831129 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -63 -63.0 1969-12-31 16:00:03.552 -7196 -1224023895 4563161080560 NULL NULL 7196 -14392 -7196 NULL NULL 63.0 6.4051596E8 -6.370792243408759E-5 63 -1.2627460 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -63 -63.0 1969-12-31 16:00:07.375 -7196 -1711796768 6381578351104 NULL NULL 7196 -14392 -7196 NULL NULL 63.0 6.4051596E8 -6.370792243408759E-5 63 -1.2627460 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -63 -63.0 1969-12-31 16:00:11.946 -7196 -994504916 3707514326848 NULL NULL 7196 -14392 -7196 NULL NULL 63.0 6.4051596E8 -6.370792243408759E-5 63 -1.2627460 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -64 -64.0 1969-12-31 15:59:56.048 -7196 406535485 -1515564288080 NULL NULL 7196 -14392 -7196 NULL NULL 64.0 6.4051596E8 -6.471915929812072E-5 64 -1.2430156 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -64 -64.0 1969-12-31 16:00:01.785 -7196 -1639157869 6110780535632 NULL NULL 7196 -14392 -7196 NULL NULL 64.0 6.4051596E8 -6.471915929812072E-5 64 -1.2430156 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -64 -64.0 1969-12-31 16:00:11.912 -7196 -1615920595 6024151978160 NULL NULL 7196 -14392 -7196 NULL NULL 64.0 6.4051596E8 -6.471915929812072E-5 64 -1.2430156 -NULL -7196.0 1969-12-31 15:59:58.174 NULL false -64 -64.0 1969-12-31 16:00:12.339 -7196 1805860756 -6732248898368 NULL NULL 7196 -14392 -7196 NULL NULL 64.0 6.4051596E8 -6.471915929812072E-5 64 -1.2430156 +-1000804087 NULL NULL H8LCu4M2u4f1S true -51 -51.0 1969-12-31 16:00:08.451 NULL -873515594 3256466134432 1000804087 1000803223.743 NULL NULL NULL 1.0 1000803250.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-1003789565 NULL NULL dq1Ji5vGb4GVow42 false -51 -51.0 1969-12-31 16:00:08.451 NULL -505400643 1884133597104 1003789565 1003788701.743 NULL NULL NULL 1.0 1003788728.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-1012011232 NULL NULL 7q0iMi2GDq0Q false 11 11.0 1969-12-31 16:00:02.351 NULL -806973080 3008395642240 1012011232 1012010368.743 NULL NULL NULL 1.0 1012010395.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1015510885 NULL NULL Kw7fOuw4DHeyXe2yg false -51 -51.0 1969-12-31 16:00:08.451 NULL -67812054 252803337312 1015510885 1015510021.743 NULL NULL NULL 1.0 1015510048.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-1016835101 NULL NULL Md2lY0T7reBu false 8 8.0 1969-12-31 16:00:15.892 NULL -491294009 1831544065552 1016835101 1016834237.743 NULL NULL NULL 1.0 1016834264.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1017266554 NULL NULL DU1m68i1Q7W3 false -51 -51.0 1969-12-31 16:00:08.451 NULL -145067516 540811699648 1017266554 1017265690.743 NULL NULL NULL 1.0 1017265717.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-1020120834 NULL NULL 6Ob80MBP350rI275 true 8 8.0 1969-12-31 16:00:15.892 NULL -100465694 374536107232 1020120834 1020119970.743 NULL NULL NULL 1.0 1020119997.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1020466796 NULL NULL 7hCJ5yJvt0775jjgq8S0bX6W false 11 11.0 1969-12-31 16:00:02.351 NULL -926772952 3455009565056 1020466796 1020465932.743 NULL NULL NULL 1.0 1020465959.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1023165277 NULL NULL 438Lxo541TwY5ID80cnR5 false 11 11.0 1969-12-31 16:00:02.351 NULL -1004780673 3745822348944 1023165277 1023164413.743 NULL NULL NULL 1.0 1023164440.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1023644243 NULL NULL Cxas82oA2hX884xmYQ2jrpDX true 11 11.0 1969-12-31 16:00:02.351 NULL -866431241 3230055666448 1023644243 1023643379.743 NULL NULL NULL 
1.0 1023643406.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1024321144 NULL NULL CE22Wjuk7d20ouN true 8 8.0 1969-12-31 16:00:15.892 NULL -94624654 352760710112 1024321144 1024320280.743 NULL NULL NULL 1.0 1024320307.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1026019772 NULL NULL T6Al7d0hN770XB65M0F2g true 11 11.0 1969-12-31 16:00:02.351 NULL -338489479 1261888777712 1026019772 1026018908.743 NULL NULL NULL 1.0 1026018935.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1039292315 NULL NULL 07488p5vb4d2 true 8 8.0 1969-12-31 16:00:15.892 NULL -432155916 1611077254848 1039292315 1039291451.743 NULL NULL NULL 1.0 1039291478.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1039495786 NULL NULL b0BEyNEe1bvQ true 8 8.0 1969-12-31 16:00:15.892 NULL -760564106 2835382987168 1039495786 1039494922.743 NULL NULL NULL 1.0 1039494949.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1039715238 NULL NULL oOt2v true -51 -51.0 1969-12-31 16:00:08.451 NULL -86361999 321957532272 1039715238 1039714374.743 NULL NULL NULL 1.0 1039714401.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-1041353707 NULL NULL 25Qky6lf2pt5FP47Mqmb true 11 11.0 1969-12-31 16:00:02.351 NULL -931949639 3474308254192 1041353707 1041352843.743 NULL NULL NULL 1.0 1041352870.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1043979188 NULL NULL 2d3tQdCGQN5k7u7S false 11 11.0 1969-12-31 16:00:02.351 NULL -8894336 33158084608 1043979188 1043978324.743 NULL NULL NULL 1.0 1043978351.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1046913669 NULL NULL 40r4yyU6T0A0Mekf24k false 8 8.0 1969-12-31 16:00:15.892 NULL -90393132 336985596096 1046913669 1046912805.743 NULL NULL NULL 1.0 1046912832.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1047036113 NULL NULL Js07yFa2qnrfVU1j2e3 false 11 11.0 1969-12-31 16:00:02.351 NULL -240113848 895144425344 1047036113 1047035249.743 NULL NULL NULL 1.0 1047035276.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1048097158 NULL NULL fpt3gpLE true 11 11.0 1969-12-31 16:00:02.351 NULL -234579722 874513203616 1048097158 1048096294.743 NULL NULL NULL 1.0 1048096321.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1049984461 NULL NULL qUY8Rl34NWRg false 8 8.0 1969-12-31 16:00:15.892 NULL -247067895 921069112560 1049984461 1049983597.743 NULL NULL NULL 1.0 1049983624.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1054849160 NULL NULL CEGOy true 11 11.0 1969-12-31 16:00:02.351 NULL -1027630923 3831008080944 1054849160 1054848296.743 NULL NULL NULL 1.0 1054848323.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1055185482 NULL NULL l20vn2Awc true 11 11.0 1969-12-31 16:00:02.351 NULL -398806473 1486750531344 1055185482 1055184618.743 NULL NULL NULL 1.0 1055184645.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1058286942 NULL NULL R6q656btrqQM6a5nQ4GcVg true 8 8.0 1969-12-31 16:00:15.892 NULL -922041114 3437369272992 1058286942 1058286078.743 NULL NULL NULL 1.0 1058286105.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1058897881 NULL NULL 6fPk0A false 8 8.0 1969-12-31 16:00:15.892 NULL -800997317 2986117997776 1058897881 1058897017.743 NULL NULL NULL 1.0 1058897044.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1060624784 NULL NULL Das7E73 true -51 -51.0 1969-12-31 16:00:08.451 NULL -941434751 3509668751728 1060624784 1060623920.743 NULL NULL NULL 1.0 1060623947.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-1063164541 NULL NULL 1NydRD5y5o3 false 8 8.0 1969-12-31 16:00:15.892 NULL -74907656 279255741568 1063164541 1063163677.743 NULL NULL NULL 1.0 1063163704.023 
-8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1063745167 NULL NULL L47nqo true 8 8.0 1969-12-31 16:00:15.892 NULL -68741114 256266872992 1063745167 1063744303.743 NULL NULL NULL 1.0 1063744330.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1068247011 NULL NULL dPbX4jd1v47r1bB6506si false 8 8.0 1969-12-31 16:00:15.892 NULL -729456614 2719414256992 1068247011 1068246147.743 NULL NULL NULL 1.0 1068246174.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-1069103950 NULL NULL 41A0nYX72UOSfxO4053xy true 11 11.0 1969-12-31 16:00:02.351 NULL -927759444 3458687207232 1069103950 1069103086.743 NULL NULL NULL 1.0 1069103113.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-1069736047 NULL NULL k17Am8uPHWk02cEf1jet true 11 11.0 1969-12-31 16:00:02.351 NULL -453772520 1691663954560 1069736047 1069735183.743 NULL NULL NULL 1.0 1069735210.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-919940926 NULL NULL i1P3Wlat5EnBugL24oS4I3 true -51 -51.0 1969-12-31 16:00:08.451 NULL -533395388 1988498006464 919940926 919940062.743 NULL NULL NULL 1.0 919940089.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-923400421 NULL NULL MJ7Ej4tBYS8l2mK true 8 8.0 1969-12-31 16:00:15.892 NULL -67708318 252416609504 923400421 923399557.743 NULL NULL NULL 1.0 923399584.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-925336063 NULL NULL 060EnWLmWE4K8Pv false -51 -51.0 1969-12-31 16:00:08.451 NULL -477173411 1778902476208 925336063 925335199.743 NULL NULL NULL 1.0 925335226.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-928500968 NULL NULL 34oSgU32X true 8 8.0 1969-12-31 16:00:15.892 NULL -831143834 3098504213152 928500968 928500104.743 NULL NULL NULL 1.0 928500131.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-930153712 NULL NULL Jj21024T2xdn6 false 11 11.0 1969-12-31 16:00:02.351 NULL -737116859 2747971650352 930153712 930152848.743 NULL NULL NULL 1.0 930152875.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-930463965 NULL NULL ldk1K false 11 11.0 1969-12-31 16:00:02.351 NULL -414014176 1543444848128 930463965 930463101.743 NULL NULL NULL 1.0 930463128.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-932998902 NULL NULL kAr0ffWGEU7MHSKp true 8 8.0 1969-12-31 16:00:15.892 NULL -230462122 859162790816 932998902 932998038.743 NULL NULL NULL 1.0 932998065.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-937557606 NULL NULL 2251WSv5eA2l6WqesdKPM2 true 8 8.0 1969-12-31 16:00:15.892 NULL -532708003 1985935435184 937557606 937556742.743 NULL NULL NULL 1.0 937556769.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-943342622 NULL NULL 3w6XYq04J0Lb3Sv82eOV2HJ true -51 -51.0 1969-12-31 16:00:08.451 NULL -750731096 2798725525888 943342622 943341758.743 NULL NULL NULL 1.0 943341785.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-949286785 NULL NULL XWuYuk5qpn5Khs3764E56 true -51 -51.0 1969-12-31 16:00:08.451 NULL -946341072 3527959516416 949286785 949285921.743 NULL NULL NULL 1.0 949285948.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-954917203 NULL NULL 1M4eTm8OcOW2dAMV2V5slS1 true -51 -51.0 1969-12-31 16:00:08.451 NULL -710267209 2647876155152 954917203 954916339.743 NULL NULL NULL 1.0 954916366.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-965597463 NULL NULL b0G65a66732y6yE65hQ0 false 8 8.0 1969-12-31 16:00:15.892 NULL -922745115 3439993788720 965597463 965596599.743 NULL NULL NULL 1.0 965596626.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-970640948 NULL NULL frhe0 false 11 11.0 1969-12-31 16:00:02.351 NULL -935612665 3487964015120 970640948 970640084.743 NULL NULL NULL 1.0 970640111.023 
-11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-970918963 NULL NULL suoqdh false -51 -51.0 1969-12-31 16:00:08.451 NULL -588508542 2193959844576 970918963 970918099.743 NULL NULL NULL 1.0 970918126.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-978898374 NULL NULL ShA4jlmOwF8u7kjN false 11 11.0 1969-12-31 16:00:02.351 NULL -277483031 1034456739568 978898374 978897510.743 NULL NULL NULL 1.0 978897537.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-980072140 NULL NULL Jt7E0sR3X7V true -51 -51.0 1969-12-31 16:00:08.451 NULL -819889345 3056547478160 980072140 980071276.743 NULL NULL NULL 1.0 980071303.023 51.0 NULL -5.157308006568995E-5 51 -1.5598627 +-980511555 NULL NULL 1TBB2v0eBqlr4c7d true 8 8.0 1969-12-31 16:00:15.892 NULL -890261594 3318895222432 980511555 980510691.743 NULL NULL NULL 1.0 980510718.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 +-988289401 NULL NULL CeG187j false 11 11.0 1969-12-31 16:00:02.351 NULL -446065499 1662932180272 988289401 988288537.743 NULL NULL NULL 1.0 988288564.023 -11.0 NULL 1.1123605504364498E-5 -11 7.2320909 +-993291633 NULL NULL 8reJCOg48gHGHDs true 8 8.0 1969-12-31 16:00:15.892 NULL -861531376 3211788969728 993291633 993290769.743 NULL NULL NULL 1.0 993290796.023 -8.0 NULL 8.08989491226509E-6 -8 9.9441250 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cint, cbigint, @@ -1771,81 +1771,81 @@ LIMIT 75 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL NULL NULL 1969-12-31 15:59:58.456 15601.0 -44.0 -1416000760 15601 NULL NULL -1416000716 1416000760 44.0 -2832001476 1.0 -15601.0 NULL -1.416016361E9 0.0315682 -7197.0 -15601 NULL -NULL NULL NULL 1969-12-31 15:59:58.456 15601.0 -48.0 -1683400285 15601 NULL NULL -1683400237 1683400285 48.0 -3366800522 1.0 -15601.0 NULL -1.683415886E9 0.0289375 -5582.0 -15601 NULL -NULL NULL NULL 1969-12-31 15:59:58.456 15601.0 -57.0 -1057361026 15601 NULL NULL -1057360969 1057361026 57.0 -2114721995 1.0 -15601.0 NULL -1.057376627E9 0.0243684 -3251.0 -15601 NULL -NULL NULL NULL 1969-12-31 15:59:58.456 15601.0 -62.0 -1726415169 15601 NULL NULL -1726415107 1726415169 62.0 -3452830276 1.0 -15601.0 NULL -1.72643077E9 0.0224032 -8509.0 -15601 NULL -NULL NULL NULL 1969-12-31 15:59:58.456 15601.0 -63.0 -1167054574 15601 NULL NULL -1167054511 1167054574 63.0 -2334109085 1.0 -15601.0 NULL -1.167070175E9 0.0220476 -6168.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -44.0 -1551649760 15601 NULL NULL -1551649716 1551649760 44.0 -3103299476 1.0 -15601.0 NULL -1.551665361E9 0.0315682 -5502.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -45.0 -1022657523 15601 NULL NULL -1022657478 1022657523 45.0 -2045315001 1.0 -15601.0 NULL -1.022673124E9 0.0308667 -11973.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -45.0 -1291025659 15601 NULL NULL -1291025614 1291025659 45.0 -2582051273 1.0 -15601.0 NULL -1.29104126E9 0.0308667 -11707.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -45.0 -831227593 15601 NULL NULL -831227548 831227593 45.0 -1662455141 1.0 -15601.0 NULL -8.31243194E8 0.0308667 -6313.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -46.0 -208932264 15601 NULL NULL -208932218 208932264 46.0 -417864482 1.0 -15601.0 NULL -2.08947865E8 0.0301957 -3672.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -46.0 -468932050 15601 NULL NULL -468932004 468932050 46.0 -937864054 1.0 -15601.0 NULL -4.68947651E8 0.0301957 -12793.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 
-47.0 -436916225 15601 NULL NULL -436916178 436916225 47.0 -873832403 1.0 -15601.0 NULL -4.36931826E8 0.0295532 -10220.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -47.0 -493471535 15601 NULL NULL -493471488 493471535 47.0 -986943023 1.0 -15601.0 NULL -4.93487136E8 0.0295532 -11905.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -48.0 -1228417392 15601 NULL NULL -1228417344 1228417392 48.0 -2456834736 1.0 -15601.0 NULL -1.228432993E9 0.0289375 -10253.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -48.0 -1294837001 15601 NULL NULL -1294836953 1294837001 48.0 -2589673954 1.0 -15601.0 NULL -1.294852602E9 0.0289375 -804.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -48.0 -1427685796 15601 NULL NULL -1427685748 1427685796 48.0 -2855371544 1.0 -15601.0 NULL -1.427701397E9 0.0289375 -7084.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -48.0 -803222928 15601 NULL NULL -803222880 803222928 48.0 -1606445808 1.0 -15601.0 NULL -8.03238529E8 0.0289375 -5443.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -49.0 -1841324115 15601 NULL NULL -1841324066 1841324115 49.0 -3682648181 1.0 -15601.0 NULL -1.841339716E9 0.0283469 -489.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -49.0 -230127703 15601 NULL NULL -230127654 230127703 49.0 -460255357 1.0 -15601.0 NULL -2.30143304E8 0.0283469 -12953.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -50.0 -596103241 15601 NULL NULL -596103191 596103241 50.0 -1192206432 1.0 -15601.0 NULL -5.96118842E8 0.0277800 -4632.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -51.0 -546830045 15601 NULL NULL -546829994 546830045 51.0 -1093660039 1.0 -15601.0 NULL -5.46845646E8 0.0272353 -14995.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -52.0 -2097289702 15601 NULL NULL -2097289650 2097289702 52.0 -4194579352 1.0 -15601.0 NULL -2.097305303E9 0.0267115 -469.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -52.0 -886068046 15601 NULL NULL -886067994 886068046 52.0 -1772136040 1.0 -15601.0 NULL -8.86083647E8 0.0267115 -9251.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -54.0 -1114169807 15601 NULL NULL -1114169753 1114169807 54.0 -2228339560 1.0 -15601.0 NULL -1.114185408E9 0.0257222 -8791.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -54.0 -1754189160 15601 NULL NULL -1754189106 1754189160 54.0 -3508378266 1.0 -15601.0 NULL -1.754204761E9 0.0257222 -12720.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -54.0 -989710558 15601 NULL NULL -989710504 989710558 54.0 -1979421062 1.0 -15601.0 NULL -9.89726159E8 0.0257222 -14320.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -56.0 -1105322173 15601 NULL NULL -1105322117 1105322173 56.0 -2210644290 1.0 -15601.0 NULL -1.105337774E9 0.0248036 -6924.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -56.0 -1466363382 15601 NULL NULL -1466363326 1466363382 56.0 -2932726708 1.0 -15601.0 NULL -1.466378983E9 0.0248036 -9791.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -56.0 -865054294 15601 NULL NULL -865054238 865054294 56.0 -1730108532 1.0 -15601.0 NULL -8.65069895E8 0.0248036 -10046.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -57.0 -1698345590 15601 NULL NULL -1698345533 1698345590 57.0 -3396691123 1.0 -15601.0 NULL -1.698361191E9 0.0243684 -5129.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -57.0 -2123576095 15601 NULL NULL -2123576038 
2123576095 57.0 -4247152133 1.0 -15601.0 NULL -2.123591696E9 0.0243684 -14778.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -57.0 -304247740 15601 NULL NULL -304247683 304247740 57.0 -608495423 1.0 -15601.0 NULL -3.04263341E8 0.0243684 -12639.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -57.0 -365505703 15601 NULL NULL -365505646 365505703 57.0 -731011349 1.0 -15601.0 NULL -3.65521304E8 0.0243684 -5475.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -59.0 -2021724111 15601 NULL NULL -2021724052 2021724111 59.0 -4043448163 1.0 -15601.0 NULL -2.021739712E9 0.0235424 -6122.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -60.0 -1016256928 15601 NULL NULL -1016256868 1016256928 60.0 -2032513796 1.0 -15601.0 NULL -1.016272529E9 0.0231500 -7788.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -60.0 -1743144280 15601 NULL NULL -1743144220 1743144280 60.0 -3486288500 1.0 -15601.0 NULL -1.743159881E9 0.0231500 -13348.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -60.0 -519753851 15601 NULL NULL -519753791 519753851 60.0 -1039507642 1.0 -15601.0 NULL -5.19769452E8 0.0231500 -6536.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -60.0 -5953872 15601 NULL NULL -5953812 5953872 60.0 -11907684 1.0 -15601.0 NULL -5969473.0 0.0231500 -9891.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -61.0 -982179838 15601 NULL NULL -982179777 982179838 61.0 -1964359615 1.0 -15601.0 NULL -9.82195439E8 0.0227705 -3282.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -63.0 -1574729892 15601 NULL NULL -1574729829 1574729892 63.0 -3149459721 1.0 -15601.0 NULL -1.574745493E9 0.0220476 -11755.0 -15601 NULL -NULL NULL false 1969-12-31 15:59:58.456 15601.0 -63.0 -1996001975 15601 NULL NULL -1996001912 1996001975 63.0 -3992003887 1.0 -15601.0 NULL -1.996017576E9 0.0220476 -10035.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -44.0 -1447719201 15601 NULL NULL -1447719157 1447719201 44.0 -2895438358 1.0 -15601.0 NULL -1.447734802E9 0.0315682 -8805.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -47.0 -1828371599 15601 NULL NULL -1828371552 1828371599 47.0 -3656743151 1.0 -15601.0 NULL -1.8283872E9 0.0295532 -12404.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -48.0 -1465907371 15601 NULL NULL -1465907323 1465907371 48.0 -2931814694 1.0 -15601.0 NULL -1.465922972E9 0.0289375 -6209.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -48.0 -1666377780 15601 NULL NULL -1666377732 1666377780 48.0 -3332755512 1.0 -15601.0 NULL -1.666393381E9 0.0289375 -3768.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -48.0 -652336471 15601 NULL NULL -652336423 652336471 48.0 -1304672894 1.0 -15601.0 NULL -6.52352072E8 0.0289375 -11858.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -49.0 -197652849 15601 NULL NULL -197652800 197652849 49.0 -395305649 1.0 -15601.0 NULL -1.9766845E8 0.0283469 -3780.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -49.0 -211726367 15601 NULL NULL -211726318 211726367 49.0 -423452685 1.0 -15601.0 NULL -2.11741968E8 0.0283469 -5196.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -49.0 -57200424 15601 NULL NULL -57200375 57200424 49.0 -114400799 1.0 -15601.0 NULL -5.7216025E7 0.0283469 -7158.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -49.0 -668597606 15601 NULL NULL -668597557 668597606 49.0 -1337195163 1.0 -15601.0 NULL -6.68613207E8 0.0283469 -1150.0 -15601 
NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -49.0 -990904667 15601 NULL NULL -990904618 990904667 49.0 -1981809285 1.0 -15601.0 NULL -9.90920268E8 0.0283469 -7152.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -50.0 -458110015 15601 NULL NULL -458109965 458110015 50.0 -916219980 1.0 -15601.0 NULL -4.58125616E8 0.0277800 -2251.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -52.0 -2074134645 15601 NULL NULL -2074134593 2074134645 52.0 -4148269238 1.0 -15601.0 NULL -2.074150246E9 0.0267115 -12897.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -54.0 -1795674990 15601 NULL NULL -1795674936 1795674990 54.0 -3591349926 1.0 -15601.0 NULL -1.795690591E9 0.0257222 -15491.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -54.0 -1984659810 15601 NULL NULL -1984659756 1984659810 54.0 -3969319566 1.0 -15601.0 NULL -1.984675411E9 0.0257222 -9797.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -54.0 -641670659 15601 NULL NULL -641670605 641670659 54.0 -1283341264 1.0 -15601.0 NULL -6.4168626E8 0.0257222 -1529.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -55.0 -1062767051 15601 NULL NULL -1062766996 1062767051 55.0 -2125534047 1.0 -15601.0 NULL -1.062782652E9 0.0252545 -11330.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -55.0 -1338667765 15601 NULL NULL -1338667710 1338667765 55.0 -2677335475 1.0 -15601.0 NULL -1.338683366E9 0.0252545 -8359.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -55.0 -1483320156 15601 NULL NULL -1483320101 1483320156 55.0 -2966640257 1.0 -15601.0 NULL -1.483335757E9 0.0252545 -8278.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -56.0 -1683701844 15601 NULL NULL -1683701788 1683701844 56.0 -3367403632 1.0 -15601.0 NULL -1.683717445E9 0.0248036 -10722.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -56.0 -971846497 15601 NULL NULL -971846441 971846497 56.0 -1943692938 1.0 -15601.0 NULL -9.71862098E8 0.0248036 -13404.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -57.0 -585350546 15601 NULL NULL -585350489 585350546 57.0 -1170701035 1.0 -15601.0 NULL -5.85366147E8 0.0243684 -1026.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -58.0 -1072335429 15601 NULL NULL -1072335371 1072335429 58.0 -2144670800 1.0 -15601.0 NULL -1.07235103E9 0.0239483 -694.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -58.0 -1560616588 15601 NULL NULL -1560616530 1560616588 58.0 -3121233118 1.0 -15601.0 NULL -1.560632189E9 0.0239483 -1755.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -59.0 -1315413812 15601 NULL NULL -1315413753 1315413812 59.0 -2630827565 1.0 -15601.0 NULL -1.315429413E9 0.0235424 -15497.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -59.0 -133287350 15601 NULL NULL -133287291 133287350 59.0 -266574641 1.0 -15601.0 NULL -1.33302951E8 0.0235424 -8007.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -60.0 -2041965187 15601 NULL NULL -2041965127 2041965187 60.0 -4083930314 1.0 -15601.0 NULL -2.041980788E9 0.0231500 -12701.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -60.0 -903925845 15601 NULL NULL -903925785 903925845 60.0 -1807851630 1.0 -15601.0 NULL -9.03941446E8 0.0231500 -3905.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -61.0 -1022679553 15601 NULL NULL -1022679492 1022679553 61.0 -2045359045 1.0 -15601.0 NULL -1.022695154E9 0.0227705 -2801.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -61.0 
-854893578 15601 NULL NULL -854893517 854893578 61.0 -1709787095 1.0 -15601.0 NULL -8.54909179E8 0.0227705 -5581.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -62.0 -1592016120 15601 NULL NULL -1592016058 1592016120 62.0 -3184032178 1.0 -15601.0 NULL -1.592031721E9 0.0224032 -12075.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -62.0 667693308 15601 NULL NULL 667693370 -667693308 62.0 1335386678 1.0 -15601.0 NULL 6.67677707E8 0.0224032 1710.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -63.0 -200542601 15601 NULL NULL -200542538 200542601 63.0 -401085139 1.0 -15601.0 NULL -2.00558202E8 0.0220476 -7347.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -63.0 -721244708 15601 NULL NULL -721244645 721244708 63.0 -1442489353 1.0 -15601.0 NULL -7.21260309E8 0.0220476 -10478.0 -15601 NULL -NULL NULL true 1969-12-31 15:59:58.456 15601.0 -64.0 -1809291815 15601 NULL NULL -1809291751 1809291815 64.0 -3618583566 1.0 -15601.0 NULL -1.809307416E9 0.0217031 -12643.0 -15601 NULL +-104148943 tEO4vj3G true 1969-12-31 15:59:44.53 2248.0 NULL 1864027286 2248 false -104146695 NULL -1864027286 NULL NULL 1.0 -2248.0 194132281226719770 1.864025038E9 NULL 1422.0 -2248 -104144447 +-110450673 uv5m1sFX10 true 1969-12-31 16:00:16.376 -8148.0 NULL 1864027286 -8148 true -110458821 NULL -1864027286 NULL NULL 1.0 8148.0 205898256323389806 1.864035434E9 NULL 1178.0 8148 -110466969 +-128417177 ygkC2e2sUm2036Sd1U8kCG62 true 1969-12-31 16:00:01.936 -8871.0 NULL 1864027286 -8871 false -128426048 NULL -1864027286 NULL NULL 1.0 8871.0 239389657705145728 1.864036157E9 NULL 8411.0 8871 -128434919 +-129248849 w3OO7InLN4ic3M0h8xpvuBMn true 1969-12-31 15:59:48.413 3255.0 NULL 1864027286 3255 true -129245594 NULL -1864027286 NULL NULL 1.0 -3255.0 240917313811277884 1.864024031E9 NULL 2711.0 -3255 -129242339 +-140351494 xh0Qhj80MAcHEMVKx true 1969-12-31 16:00:14.98 -11115.0 NULL 1864027286 -11115 true -140362609 NULL -1864027286 NULL NULL 1.0 11115.0 261639733110149174 1.864038401E9 NULL 8441.0 11115 -140373724 +-198739996 uxnt0fsrBtPD807 true 1969-12-31 16:00:11.528 -14709.0 NULL 1864027286 -14709 false -198754705 NULL -1864027286 NULL NULL 1.0 14709.0 370484193340880630 1.864041995E9 NULL 14552.0 14709 -198769414 +-203191502 wK0N1nX22KSjcTVhDYq true 1969-12-31 15:59:49.907 -6663.0 NULL 1864027286 -6663 true -203198165 NULL -1864027286 NULL NULL 1.0 6663.0 378766924025130190 1.864033949E9 NULL 6395.0 6663 -203204828 +-25028803 x8n40D35c65l true 1969-12-31 15:59:43.775 -4002.0 NULL 1864027286 -4002 true -25032805 NULL -1864027286 NULL NULL 1.0 4002.0 46661831565117230 1.864031288E9 NULL 3740.0 4002 -25036807 +-315135285 y4jD1v2Go true 1969-12-31 15:59:43.97 -4683.0 NULL 1864027286 -4683 false -315139968 NULL -1864027286 NULL NULL 1.0 4683.0 587429499261166848 1.864031969E9 NULL 1283.0 4683 -315144651 +-360475292 uq2hp true 1969-12-31 16:00:10.933 -1007.0 NULL 1864027286 -1007 false -360476299 NULL -1864027286 NULL NULL 1.0 1007.0 671937657292294514 1.864028293E9 NULL 803.0 1007 -360477306 +-362733967 tUi8QYP4S53YPcw true 1969-12-31 16:00:00.003 -7959.0 NULL 1864027286 -7959 true -362741926 NULL -1864027286 NULL NULL 1.0 7959.0 676160847840192836 1.864035245E9 NULL 5609.0 7959 -362749885 +-367195514 t5805L0xlU0YM true 1969-12-31 15:59:43.799 -13339.0 NULL 1864027286 -13339 false -367208853 NULL -1864027286 NULL NULL 1.0 13339.0 684487321652762958 1.864040625E9 NULL 8748.0 13339 -367222192 +-370283300 x0w77gi6iqtTQ1 true 1969-12-31 15:59:44.652 1850.0 NULL 1864027286 1850 true 
-370281450 NULL -1864027286 NULL NULL 1.0 -1850.0 690214726299644700 1.864025436E9 NULL 586.0 -1850 -370279600 +-372506148 utfrK57P2tp0 true 1969-12-31 16:00:05.326 -12525.0 NULL 1864027286 -12525 false -372518673 NULL -1864027286 NULL NULL 1.0 12525.0 694384971016511478 1.864039811E9 NULL 6686.0 12525 -372531198 +-380733719 t7s5did true NULL -2120.0 NULL 1864027286 -2120 false -380735839 NULL -1864027286 NULL NULL 1.0 2120.0 709701992654102954 1.864029406E9 NULL 326.0 2120 -380737959 +-412772386 uO4aN4J0dKv3717r8fPG true 1969-12-31 16:00:07.824 -11809.0 NULL 1864027286 -11809 true -412784195 NULL -1864027286 NULL NULL 1.0 11809.0 769441002709544770 1.864039095E9 NULL 254.0 11809 -412796004 +-452599200 v4L3dR650oy4O8MPhjc true 1969-12-31 15:59:46.988 8757.0 NULL 1864027286 8757 false -452590443 NULL -1864027286 NULL NULL 1.0 -8757.0 843640935134827698 1.864018529E9 NULL 3509.0 -8757 -452581686 +-459571311 taArL704d542R82qw8 true 1969-12-31 16:00:00.738 -13901.0 NULL 1864027286 -13901 false -459585212 NULL -1864027286 NULL NULL 1.0 13901.0 856679375410094632 1.864041187E9 NULL 493.0 13901 -459599113 +-487903609 tINcSR1MT3f2P4 true 1969-12-31 16:00:12.099 -9147.0 NULL 1864027286 -9147 false -487912756 NULL -1864027286 NULL NULL 1.0 9147.0 909482690371460216 1.864036433E9 NULL 5891.0 9147 -487921903 +-518918140 ugq0uAy0qXj2D0fX true 1969-12-31 16:00:12.479 5245.0 NULL 1864027286 5245 false -518912895 NULL -1864027286 NULL NULL 1.0 -5245.0 967267795337252970 1.864022041E9 NULL 1491.0 -5245 -518907650 +-520054643 wc4Ae163B5VxG2L true 1969-12-31 16:00:06.693 301.0 NULL 1864027286 301 true -520054342 NULL -1864027286 NULL NULL 1.0 -301.0 969395483690775812 1.864026985E9 NULL 205.0 -301 -520054041 +-520765672 vQalqQ true 1969-12-31 15:59:44.48 -3969.0 NULL 1864027286 -3969 false -520769641 NULL -1864027286 NULL NULL 1.0 3969.0 970728820544424326 1.864031255E9 NULL 2312.0 3969 -520773610 +-532611088 wLWrtVNx188P7uXPV true 1969-12-31 16:00:04.012 -1428.0 NULL 1864027286 -1428 false -532612516 NULL -1864027286 NULL NULL 1.0 1428.0 992804262689111576 1.864028714E9 NULL 338.0 1428 -532613944 +-553779656 weQ0d24K116Y0 true 1969-12-31 16:00:12.009 11147.0 NULL 1864027286 11147 true -553768509 NULL -1864027286 NULL NULL 1.0 -11147.0 1032239610903536574 1.864016139E9 NULL 3652.0 -11147 -553757362 +-601825532 v4gQqo0bxX256o7EEN42lSoU true 1969-12-31 15:59:58.417 11021.0 NULL 1864027286 11021 false -601814511 NULL -1864027286 NULL NULL 1.0 -11021.0 1121798669614747146 1.864016265E9 NULL 1472.0 -11021 -601803490 +-64947310 vvictFVSOgi true 1969-12-31 15:59:48.172 6612.0 NULL 1864027286 6612 false -64940698 NULL -1864027286 NULL NULL 1.0 -6612.0 121051233043885628 1.864020674E9 NULL 5306.0 -6612 -64934086 +-719899789 umNykRkKiih6Cx6K42 true 1969-12-31 15:59:55.878 -10134.0 NULL 1864027286 -10134 true -719909923 NULL -1864027286 NULL NULL 1.0 10134.0 1341931739934158978 1.86403742E9 NULL 9728.0 10134 -719920057 +-758062600 vA0bEQqO50LlKcj7AAR56P63 true 1969-12-31 16:00:16.169 7111.0 NULL 1864027286 7111 false -758055489 NULL -1864027286 NULL NULL 1.0 -7111.0 1413036115798072854 1.864020175E9 NULL 6634.0 -7111 -758048378 +-770958258 uXu1mj3tWs36cGpu4p3aHq true 1969-12-31 15:59:56.944 8059.0 NULL 1864027286 8059 false -770950199 NULL -1864027286 NULL NULL 1.0 -8059.0 1437072207083129914 1.864019227E9 NULL 4763.0 -8059 -770942140 +-778541551 t66fkUkSNP78t2856Lcn true 1969-12-31 16:00:03.35 15678.0 NULL 1864027286 15678 true -778525873 NULL -1864027286 NULL NULL 1.0 -15678.0 1451193470128970678 1.864011608E9 NULL 
7154.0 -15678 -778510195 +-804390280 uNJPm true 1969-12-31 16:00:12.321 -10737.0 NULL 1864027286 -10737 true -804401017 NULL -1864027286 NULL NULL 1.0 10737.0 1499425444574149862 1.864038023E9 NULL 8927.0 10737 -804411754 +-804959350 v2wRf43gpDUt1lfieq true 1969-12-31 16:00:08.659 -8072.0 NULL 1864027286 -8072 true -804967422 NULL -1864027286 NULL NULL 1.0 8072.0 1500481238949076692 1.864035358E9 NULL 686.0 8072 -804975494 +-87388872 veoqj217BlDBBVkN0ei3c true 1969-12-31 16:00:03.492 10039.0 NULL 1864027286 10039 false -87378833 NULL -1864027286 NULL NULL 1.0 -10039.0 162876528930837238 1.864017247E9 NULL 5844.0 -10039 -87368794 +-894394703 tFtQ26aDMi1tJ026luPcu true 1969-12-31 15:59:56.928 -3178.0 NULL 1864027286 -3178 true -894397881 NULL -1864027286 NULL NULL 1.0 3178.0 1667182054724580966 1.864030464E9 NULL 3166.0 3178 -894401059 +-933664265 ue8IUf0GlY18RT325P2tu true 1969-12-31 16:00:02.456 13750.0 NULL 1864027286 13750 false -933650515 NULL -1864027286 NULL NULL 1.0 -13750.0 1740350035547952290 1.864013536E9 NULL 8536.0 -13750 -933636765 +-947255611 vgKx505VdPsHO true 1969-12-31 15:59:46.062 13661.0 NULL 1864027286 13661 true -947241950 NULL -1864027286 NULL NULL 1.0 -13661.0 1765684841243847700 1.864013625E9 NULL 11158.0 -13661 -947228289 +1030560824 tmS75um6Mvyb6N1oiKP7 true 1969-12-31 15:59:53.233 -11073.0 NULL 1864027286 -11073 false 1030549751 NULL -1864027286 NULL NULL 1.0 11073.0 -1920972855444505786 1.864038359E9 NULL 9539.0 11073 1030538678 +108023602 veIw1kh7 true 1969-12-31 16:00:14.188 9239.0 NULL 1864027286 9239 true 108032841 NULL -1864027286 NULL NULL 1.0 -9239.0 -201376163408099526 1.864018047E9 NULL 3602.0 -9239 108042080 +136715714 y2Q3YW true 1969-12-31 15:59:50.737 11813.0 NULL 1864027286 11813 false 136727527 NULL -1864027286 NULL NULL 1.0 -11813.0 -254863841075301722 1.864015473E9 NULL 6764.0 -11813 136739340 +194353234 vtad71tYi1fs1e0tcJg0 true 1969-12-31 15:59:55.372 2960.0 NULL 1864027286 2960 true 194356194 NULL -1864027286 NULL NULL 1.0 -2960.0 -362285248819109484 1.864024326E9 NULL 2806.0 -2960 194359154 +200690208 wfT8d53abPxBj0L true 1969-12-31 16:00:15.522 -12052.0 NULL 1864027286 -12052 true 200678156 NULL -1864027286 NULL NULL 1.0 12052.0 -374069558488164616 1.864039338E9 NULL 4706.0 12052 200666104 +2101183 x7By66525 true 1969-12-31 16:00:05.831 -8915.0 NULL 1864027286 -8915 false 2092268 NULL -1864027286 NULL NULL 1.0 8915.0 -3900044641624648 1.864036201E9 NULL 7766.0 8915 2083353 +223484391 tca24E6L true 1969-12-31 16:00:02.505 -12721.0 NULL 1864027286 -12721 false 223471670 NULL -1864027286 NULL NULL 1.0 12721.0 -416557290527987620 1.864040007E9 NULL 6435.0 12721 223458949 +236934374 wiBqE2A1x8T8gcT4 true 1969-12-31 16:00:11.324 -15101.0 NULL 1864027286 -15101 false 236919273 NULL -1864027286 NULL NULL 1.0 15101.0 -441623989451283078 1.864042387E9 NULL 5149.0 15101 236904172 +245429195 vXc7m82uAg2g24 true 1969-12-31 15:59:57.185 -16001.0 NULL 1864027286 -16001 false 245413194 NULL -1864027286 NULL NULL 1.0 16001.0 -457456889960411484 1.864043287E9 NULL 6792.0 16001 245397193 +247204221 wblxBWSlwWlX7E true 1969-12-31 15:59:54.186 4502.0 NULL 1864027286 4502 true 247208723 NULL -1864027286 NULL NULL 1.0 -4502.0 -460803805009215778 1.864022784E9 NULL 1198.0 -4502 247213225 +252479879 tdUWi true 1969-12-31 16:00:01.806 -877.0 NULL 1864027286 -877 false 252479002 NULL -1864027286 NULL NULL 1.0 877.0 -470627748870048572 1.864028163E9 NULL 620.0 877 252478125 +304132102 vxAjxUq0k true 1969-12-31 16:00:03.466 -12962.0 NULL 1864027286 -12962 true 304119140 
NULL -1864027286 NULL NULL 1.0 12962.0 -566886375154854040 1.864040248E9 NULL 952.0 12962 304106178 +308450217 t7i26BC11U1YTY8I0p true 1969-12-31 15:59:46.402 1017.0 NULL 1864027286 1017 true 308451234 NULL -1864027286 NULL NULL 1.0 -1017.0 -574961516576370924 1.864026269E9 NULL 530.0 -1017 308452251 +319983133 t78m7 true 1969-12-31 16:00:09.36 14512.0 NULL 1864027286 14512 true 319997645 NULL -1864027286 NULL NULL 1.0 -14512.0 -596484341735741470 1.864012774E9 NULL 4422.0 -14512 320012157 +336043289 xow6f03825H0h8mFjVr true 1969-12-31 15:59:51.587 -97.0 NULL 1864027286 -97 true 336043192 NULL -1864027286 NULL NULL 1.0 97.0 -626393679162536912 1.864027383E9 NULL 14.0 97 336043095 +336056067 tJ7bf true 1969-12-31 15:59:50.481 16124.0 NULL 1864027286 16124 false 336072191 NULL -1864027286 NULL NULL 1.0 -16124.0 -626447734089803626 1.864011162E9 NULL 12266.0 -16124 336088315 +396908469 uGD31tQ70Py2E0T true 1969-12-31 15:59:50.224 16084.0 NULL 1864027286 16084 false 396924553 NULL -1864027286 NULL NULL 1.0 -16084.0 -739878197275353158 1.864011202E9 NULL 4274.0 -16084 396940637 +421764768 whw6kHIbH true 1969-12-31 16:00:06.463 5142.0 NULL 1864027286 5142 true 421769910 NULL -1864027286 NULL NULL 1.0 -5142.0 -786190620653764260 1.864022144E9 NULL 866.0 -5142 421775052 +426284338 u6ELlhG3 true 1969-12-31 16:00:00.64 -15070.0 NULL 1864027286 -15070 true 426269268 NULL -1864027286 NULL NULL 1.0 15070.0 -794577546735246648 1.864042356E9 NULL 3916.0 15070 426254198 +434741484 uxI8i true 1969-12-31 16:00:12.505 8120.0 NULL 1864027286 8120 false 434749604 NULL -1864027286 NULL NULL 1.0 -8120.0 -810385124433694744 1.864019166E9 NULL 86.0 -8120 434757724 +460817498 v3A1iI77YBRwl3I16 true 1969-12-31 16:00:08.026 7391.0 NULL 1864027286 7391 true 460824889 NULL -1864027286 NULL NULL 1.0 -7391.0 -858990167163921254 1.864019895E9 NULL 2304.0 -7391 460832280 +466063930 w6OUE6V3UjfE2 true 1969-12-31 15:59:56.958 14276.0 NULL 1864027286 14276 true 466078206 NULL -1864027286 NULL NULL 1.0 -14276.0 -868782493393928916 1.86401301E9 NULL 9966.0 -14276 466092482 +526337887 t0346137k7Lk0O true 1969-12-31 15:59:51.609 15044.0 NULL 1864027286 15044 false 526352931 NULL -1864027286 NULL NULL 1.0 -15044.0 -981136225450075266 1.864012242E9 NULL 466.0 -15044 526367975 +54908166 wLIR3B37 true 1969-12-31 16:00:05.971 8499.0 NULL 1864027286 8499 true 54916665 NULL -1864027286 NULL NULL 1.0 -8499.0 -102366162016121190 1.864018787E9 NULL 1109.0 -8499 54925164 +573439687 vALXyM54AgSH4e0O4IN true 1969-12-31 16:00:10.069 -150.0 NULL 1864027286 -150 false 573439537 NULL -1864027286 NULL NULL 1.0 150.0 -1068906943839206582 1.864027436E9 NULL 86.0 150 573439387 +573476034 x1832l1R2m3V true 1969-12-31 15:59:49.722 -5070.0 NULL 1864027286 -5070 false 573470964 NULL -1864027286 NULL NULL 1.0 5070.0 -1068965524624723704 1.864032356E9 NULL 1226.0 5070 573465894 +58198060 t7Sx50XeM true 1969-12-31 16:00:07.889 7557.0 NULL 1864027286 7557 true 58205617 NULL -1864027286 NULL NULL 1.0 -7557.0 -108496858286465462 1.864019729E9 NULL 2552.0 -7557 58213174 +605953955 x5vy367f6d81FfL8AI8XJ true 1969-12-31 16:00:01.206 11683.0 NULL 1864027286 11683 false 605965638 NULL -1864027286 NULL NULL 1.0 -11683.0 -1129536483610398468 1.864015603E9 NULL 4636.0 -11683 605977321 +732924624 yxN0212hM17E8J8bJj8D7b true 1969-12-31 15:59:46.461 -6751.0 NULL 1864027286 -6751 false 732917873 NULL -1864027286 NULL NULL 1.0 6751.0 -1366178913669082678 1.864034037E9 NULL 1925.0 6751 732911122 +741306115 y1uSBY0 true 1969-12-31 15:59:56.456 -16032.0 NULL 1864027286 -16032 
false 741290083 NULL -1864027286 NULL NULL 1.0 16032.0 -1381784941553204738 1.864043318E9 NULL 2678.0 16032 741274051 +746145173 wEe2THv60F6 true 1969-12-31 16:00:03.372 -5589.0 NULL 1864027286 -5589 true 746139584 NULL -1864027286 NULL NULL 1.0 5589.0 -1390824543740689024 1.864032875E9 NULL 773.0 5589 746133995 +773036466 xnk564ke0a7kay3aE6IC true 1969-12-31 16:00:12.369 -12066.0 NULL 1864027286 -12066 true 773024400 NULL -1864027286 NULL NULL 1.0 12066.0 -1440938574343778400 1.864039352E9 NULL 11276.0 12066 773012334 +773348268 vwb48kytjp0Q2YEb true 1969-12-31 15:59:44.909 12581.0 NULL 1864027286 12581 false 773360849 NULL -1864027286 NULL NULL 1.0 -12581.0 -1441565724460125814 1.864014705E9 NULL 1164.0 -12581 773373430 +855072260 y7S47c5V true 1969-12-31 16:00:08.381 -11734.0 NULL 1864027286 -11734 false 855060526 NULL -1864027286 NULL NULL 1.0 11734.0 -1593856151645512436 1.86403902E9 NULL 10982.0 11734 855048792 +86487282 vH8AHgcWaDm true 1969-12-31 16:00:10.869 13309.0 NULL 1864027286 13309 false 86500591 NULL -1864027286 NULL NULL 1.0 -13309.0 -161239461879126026 1.864013977E9 NULL 8673.0 -13309 86513900 +872474570 wT50ouOe760m3AyJ7x4p83U6 true 1969-12-31 15:59:46.57 -2856.0 NULL 1864027286 -2856 true 872471714 NULL -1864027286 NULL NULL 1.0 2856.0 -1626311081159188204 1.864030142E9 NULL 1766.0 2856 872468858 +936765787 wP0re2S74Y308jgOTc6 true 1969-12-31 15:59:50.924 -10311.0 NULL 1864027286 -10311 false 936755476 NULL -1864027286 NULL NULL 1.0 10311.0 -1746137767573918136 1.864037597E9 NULL 4706.0 10311 936745165 +95424126 txKwQS70d20 true 1969-12-31 16:00:16.343 9766.0 NULL 1864027286 9766 false 95433892 NULL -1864027286 NULL NULL 1.0 -9766.0 -177891378697177112 1.86401752E9 NULL 632.0 -9766 95443658 +97246854 vvK378scVFuBh8Q3HXUJsP true 1969-12-31 16:00:01.629 -9554.0 NULL 1864027286 -9554 true 97237300 NULL -1864027286 NULL NULL 1.0 9554.0 -181252980416967800 1.86403684E9 NULL 3670.0 9554 97227746 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT ctimestamp1, cstring2, @@ -2094,7 +2094,7 @@ POSTHOOK: Input: default@alltypesorc 1969-12-31 16:00:05.83 06Tj8f5xNhpaiE71AWqJ7b5 15601.0 -49.0 226841234 15601 63558.76548052676 -15858 315168.0 -15601.0 158740.17500000002 -6432.0 49.0 NULL -15601.0 -2.43391201E8 1969-12-31 16:00:05.997 12AEw 15601.0 -64.0 1421812187 15601 398378.30961053516 -15858 411648.0 -15601.0 158740.17500000002 -6432.0 64.0 NULL -15601.0 -2.43391201E8 1969-12-31 16:00:07.499 14MDiWrX 15601.0 -33.0 42147119 15601 11809.223592042588 -15858 212256.0 -15601.0 158740.17500000002 -6432.0 33.0 NULL -15601.0 -2.43391201E8 -1969-12-31 16:00:08.451 rVWAj4N1MCg8Scyp7wj2C NULL -51.0 -89010 NULL -24.93975903614458 NULL 328032.0 NULL NULL -6432.0 51.0 NULL NULL NULL +1969-12-31 16:00:08.488 16jmamsEtKc51n 15601.0 1.0 -832606494 15601 -233288.45446903896 -15858 -6432.0 -15601.0 158740.17500000002 -6432.0 -1.0 NULL -15601.0 -2.43391201E8 1969-12-31 16:00:09.123 064GHv0UW8 15601.0 -14.0 1007181336 15601 282202.67189688986 -15858 90048.0 -15601.0 158740.17500000002 -6432.0 14.0 NULL -15601.0 -2.43391201E8 1969-12-31 16:00:11.928 0UugmY0R5hI 15601.0 -32.0 1701987317 15601 476880.7276548053 -15858 205824.0 -15601.0 158740.17500000002 -6432.0 32.0 NULL -15601.0 -2.43391201E8 1969-12-31 16:00:12.853 12gbSP4px465TdXmV5F2apmC 15601.0 28.0 -1556827241 15601 -436208.2490893808 -15858 -180096.0 -15601.0 158740.17500000002 -6432.0 -28.0 NULL -15601.0 -2.43391201E8 diff --git ql/src/test/results/clientpositive/spark/vectorized_case.q.out 
ql/src/test/results/clientpositive/spark/vectorized_case.q.out index 8296a658e9..bfad0b77ed 100644 --- ql/src/test/results/clientpositive/spark/vectorized_case.q.out +++ ql/src/test/results/clientpositive/spark/vectorized_case.q.out @@ -314,10 +314,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1] + valueColumns: 0:bigint, 1:bigint Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint), _col1 (type: bigint) Execution mode: vectorized @@ -456,10 +455,9 @@ STAGE PLANS: sort order: Reduce Sink Vectorization: className: VectorReduceSinkEmptyKeyOperator - keyColumnNums: [] native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [0, 1] + valueColumns: 0:bigint, 1:bigint Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: bigint), _col1 (type: bigint) Execution mode: vectorized diff --git ql/src/test/results/clientpositive/spark/vectorized_mapjoin.q.out ql/src/test/results/clientpositive/spark/vectorized_mapjoin.q.out index 131f6922b6..fdf337d58d 100644 --- ql/src/test/results/clientpositive/spark/vectorized_mapjoin.q.out +++ ql/src/test/results/clientpositive/spark/vectorized_mapjoin.q.out @@ -102,6 +102,7 @@ STAGE PLANS: className: VectorMapJoinInnerBigOnlyLongOperator native: true nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + hashTableImplementationType: OPTIMIZED outputColumnNames: _col0, _col1 input vertices: 1 Map 3 diff --git ql/src/test/results/clientpositive/spark/vectorized_ptf.q.out ql/src/test/results/clientpositive/spark/vectorized_ptf.q.out index 0c4831055b..e6595f943a 100644 --- ql/src/test/results/clientpositive/spark/vectorized_ptf.q.out +++ ql/src/test/results/clientpositive/spark/vectorized_ptf.q.out @@ -154,11 +154,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 2:string + valueColumns: 5:int, 7:double Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE value expressions: p_size (type: int), p_retailprice (type: double) Execution mode: vectorized @@ -197,7 +197,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST 
output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -213,7 +213,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -241,7 +241,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -406,10 +406,10 @@ STAGE PLANS: Map-reduce partition columns: p_partkey (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 2, 5] + valueColumns: 1:string, 2:string, 5:int Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE value expressions: p_name (type: string), p_mfgr (type: string), p_size (type: int) Execution mode: vectorized @@ -450,10 +450,9 @@ STAGE PLANS: Map-reduce partition columns: p_partkey (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -512,7 +511,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -543,7 +542,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -658,11 +657,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE value expressions: p_size (type: int) Execution mode: vectorized @@ -701,7 +700,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -814,11 +813,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce 
Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 2:string + valueColumns: 5:int, 7:double Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE value expressions: p_size (type: int), p_retailprice (type: double) Execution mode: vectorized @@ -857,7 +856,7 @@ STAGE PLANS: Partition table definition input alias: abc name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -873,7 +872,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -901,7 +900,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1063,11 +1062,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE value expressions: p_size (type: int) Execution mode: vectorized @@ -1106,7 +1105,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -1137,7 +1136,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -1281,11 +1280,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE value expressions: p_size (type: int) Execution mode: vectorized @@ -1324,7 +1323,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: 
_col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -1364,7 +1363,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -1501,11 +1500,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [0, 3, 4, 5, 6, 7, 8] + partitionColumns: 2:string + valueColumns: 0:int, 3:string, 4:string, 5:int, 6:string, 7:double, 8:string Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE value expressions: p_partkey (type: int), p_brand (type: string), p_type (type: string), p_size (type: int), p_container (type: string), p_retailprice (type: double), p_comment (type: string) Execution mode: vectorized @@ -1546,10 +1545,9 @@ STAGE PLANS: Map-reduce partition columns: p_partkey (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -1587,7 +1585,7 @@ STAGE PLANS: Partition table definition input alias: abc name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col0: int, _col1: string, _col2: string, _col3: string, _col4: string, _col5: int, _col6: string, _col7: double, _col8: string partition by: _col2 raw input shape: @@ -1726,10 +1724,9 @@ STAGE PLANS: Map-reduce partition columns: p_partkey (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -1761,11 +1758,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [0, 3, 4, 5, 6, 7, 8] + partitionColumns: 2:string + valueColumns: 0:int, 3:string, 4:string, 5:int, 6:string, 
7:double, 8:string Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE value expressions: p_partkey (type: int), p_brand (type: string), p_type (type: string), p_size (type: int), p_container (type: string), p_retailprice (type: double), p_comment (type: string) Execution mode: vectorized @@ -1830,7 +1827,7 @@ STAGE PLANS: Partition table definition input alias: abc name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col0: int, _col1: string, _col2: string, _col3: string, _col4: string, _col5: int, _col6: string, _col7: double, _col8: string partition by: _col2 raw input shape: @@ -1939,7 +1936,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: p_name ASC NULLS FIRST, p_size DESC NULLS LAST + order by: p_name ASC NULLS LAST, p_size DESC NULLS LAST output shape: p_name: string, p_mfgr: string, p_size: int partition by: p_mfgr raw input shape: @@ -1977,7 +1974,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col1 ASC NULLS FIRST, _col5 DESC NULLS LAST + order by: _col1 ASC NULLS LAST, _col5 DESC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -1993,7 +1990,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aaz + reduceColumnNullOrder: azz reduceColumnSortOrder: ++- allNative: false usesVectorUDFAdaptor: false @@ -2021,7 +2018,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST, _col5 DESC NULLS LAST + order by: _col1 ASC NULLS LAST, _col5 DESC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -2161,7 +2158,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: p_name ASC NULLS FIRST + order by: p_name ASC NULLS LAST output shape: p_name: string, p_mfgr: string, p_size: int, p_retailprice: double partition by: p_mfgr raw input shape: @@ -2200,7 +2197,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -2217,7 +2214,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -2245,7 +2242,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -2403,11 +2400,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 
2:string + valueColumns: 5:int, 7:double Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE value expressions: p_size (type: int), p_retailprice (type: double) Execution mode: vectorized @@ -2446,7 +2443,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -2462,7 +2459,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -2490,7 +2487,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -2651,11 +2648,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 2:string + valueColumns: 5:int, 7:double Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE value expressions: p_size (type: int), p_retailprice (type: double) Execution mode: vectorized @@ -2694,7 +2691,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -2708,7 +2705,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -2716,7 +2713,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -2748,7 +2745,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -2756,7 +2753,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -2772,7 +2769,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine 
spark IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -2800,7 +2797,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -2968,11 +2965,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 2:string + valueColumns: 5:int, 7:double Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE value expressions: p_size (type: int), p_retailprice (type: double) Execution mode: vectorized @@ -3011,7 +3008,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -3042,7 +3039,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -3186,11 +3183,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [0, 5, 7] + partitionColumns: 2:string + valueColumns: 0:int, 5:int, 7:double Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE value expressions: p_partkey (type: int), p_size (type: int), p_retailprice (type: double) Execution mode: vectorized @@ -3231,10 +3228,9 @@ STAGE PLANS: Map-reduce partition columns: p_partkey (type: int) Reduce Sink Vectorization: className: VectorReduceSinkLongOperator - keyColumnNums: [0] + keyColumns: 0:int native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [] Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map Vectorization: @@ -3272,7 +3268,7 @@ STAGE PLANS: Partition table definition input alias: abc name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col0: int, _col1: string, _col2: string, _col5: int, _col7: double partition by: _col2 raw input shape: @@ -3327,7 +3323,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST 
partition by: _col2 raw input shape: window functions: @@ -3479,11 +3475,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5] + partitionColumns: 2:string + valueColumns: 5:int Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE value expressions: p_size (type: int) Execution mode: vectorized @@ -3522,7 +3518,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -3714,11 +3710,11 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [0, 1] + keyColumns: 0:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [0] - valueColumnNums: [2] + partitionColumns: 0:string + valueColumns: 2:double Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: double) Execution mode: vectorized @@ -3763,7 +3759,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col0 ASC NULLS FIRST + order by: _col0 ASC NULLS LAST output shape: _col0: string, _col1: string, _col2: double partition by: _col0 raw input shape: @@ -3794,7 +3790,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -3971,11 +3967,11 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkObjectHashOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - partitionColumnNums: [2] - valueColumnNums: [5, 7] + partitionColumns: 2:string + valueColumns: 5:int, 7:double Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE value expressions: p_size (type: int), p_retailprice (type: double) Execution mode: vectorized @@ -3999,7 +3995,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -4027,7 +4023,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition 
by: _col2 raw input shape: window functions: @@ -4105,7 +4101,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -4146,7 +4142,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col3 ASC NULLS FIRST, _col2 ASC NULLS FIRST + order by: _col3 ASC NULLS LAST, _col2 ASC NULLS LAST partition by: _col3 raw input shape: window functions: @@ -4209,7 +4205,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE @@ -4238,7 +4234,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE @@ -4460,10 +4456,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [2] + keyColumns: 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 5] + valueColumns: 1:string, 5:int Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE value expressions: p_name (type: string), p_size (type: int) Execution mode: vectorized @@ -4502,14 +4498,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -4523,7 +4519,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4531,7 +4527,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4563,7 +4559,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4571,7 +4567,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4805,10 +4801,10 @@ STAGE PLANS: Map-reduce 
partition columns: p_mfgr (type: string) Reduce Sink Vectorization: className: VectorReduceSinkStringOperator - keyColumnNums: [2] + keyColumns: 2:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [1, 5] + valueColumns: 1:string, 5:int Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE value expressions: p_name (type: string), p_size (type: int) Execution mode: vectorized @@ -4847,14 +4843,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -4885,7 +4881,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -4916,7 +4912,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -4932,7 +4928,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -4960,7 +4956,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -5146,10 +5142,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string), p_name (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [5] + valueColumns: 5:int Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE value expressions: p_size (type: int) Execution mode: vectorized @@ -5188,14 +5184,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -5226,14 +5222,14 @@ STAGE PLANS: Partition table 
definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -5249,7 +5245,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -5277,7 +5273,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -5464,10 +5460,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string), p_name (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [5] + valueColumns: 5:int Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE value expressions: p_size (type: int) Execution mode: vectorized @@ -5506,14 +5502,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -5544,7 +5540,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -5558,7 +5554,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -5591,7 +5587,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -5823,10 +5819,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string), p_name (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, 
BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [5] + valueColumns: 5:int Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE value expressions: p_size (type: int) Execution mode: vectorized @@ -5865,7 +5861,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -5879,7 +5875,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -5887,7 +5883,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -5919,7 +5915,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -5927,7 +5923,7 @@ STAGE PLANS: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2 raw input shape: @@ -5943,7 +5939,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: zz reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -5971,7 +5967,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST partition by: _col2, _col1 raw input shape: window functions: @@ -6152,10 +6148,10 @@ STAGE PLANS: Map-reduce partition columns: p_mfgr (type: string), p_name (type: string) Reduce Sink Vectorization: className: VectorReduceSinkMultiKeyOperator - keyColumnNums: [2, 1] + keyColumns: 2:string, 1:string native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - valueColumnNums: [5] + valueColumns: 5:int Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE value expressions: p_size (type: int) Execution mode: vectorized @@ -6194,14 +6190,14 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: Partition table definition input alias: ptf_2 name: noop - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -6215,7 +6211,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: 
noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -6248,7 +6244,7 @@ STAGE PLANS: Partition table definition input alias: ptf_1 name: noopwithmap - order by: _col2 ASC NULLS FIRST, _col1 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST, _col1 ASC NULLS LAST output shape: _col1: string, _col2: string, _col5: int partition by: _col2, _col1 raw input shape: @@ -6265,7 +6261,7 @@ STAGE PLANS: Reduce Vectorization: enabled: true enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true - reduceColumnNullOrder: aa + reduceColumnNullOrder: az reduceColumnSortOrder: ++ allNative: false usesVectorUDFAdaptor: false @@ -6293,7 +6289,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out index 3944542475..a172ffc7c1 100644 --- ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out +++ ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out @@ -258,7 +258,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_string - Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -269,7 +269,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [5, 6, 7, 8, 9, 10, 11, 12, 0, 1, 3, 13, 14, 15, 16, 17] selectExpressions: VectorUDFUnixTimeStampTimestamp(col 1:timestamp) -> 5:bigint, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 6:int, VectorUDFMonthTimestamp(col 1:timestamp, field MONTH) -> 7:int, VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 8:int, VectorUDFWeekOfYearTimestamp(col 1:timestamp, field WEEK_OF_YEAR) -> 9:int, VectorUDFHourTimestamp(col 1:timestamp, field HOUR_OF_DAY) -> 10:int, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 11:int, VectorUDFSecondTimestamp(col 1:timestamp, field SECOND) -> 12:int, IfExprTimestampColumnScalar(col 0:boolean, col 1:timestamp, val 1319-01-25 08:31:57.778) -> 13:timestamp, IfExprTimestampScalarColumn(col 0:boolean, val 2000-12-18 00:42:30.0005, col 1:timestamp) -> 14:timestamp, IfExprTimestampColumnColumn(col 0:boolean, col 1:timestampcol 3:timestamp) -> 15:timestamp, IfExprColumnNull(col 0:boolean, col 1:timestamp, null)(children: col 0:boolean, col 1:timestamp) -> 16:timestamp, IfExprNullColumn(col 0:boolean, null, col 3)(children: col 0:boolean, col 3:timestamp) -> 17:timestamp - Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: bigint) sort order: + @@ -277,7 +277,7 @@ STAGE PLANS: className: VectorReduceSinkObjectHashOperator native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - 
Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int), _col9 (type: boolean), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp), _col13 (type: timestamp), _col14 (type: timestamp), _col15 (type: timestamp), _col16 (type: timestamp) Execution mode: vectorized Map Vectorization: @@ -305,13 +305,13 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2, 3, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] - Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -369,6 +369,46 @@ ORDER BY c1 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc_string #### A masked pattern was here #### +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.628 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:43.628 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.637 1969-12-31 15:59:58.174 1319-02-02 16:31:57.778 1969-12-31 23:59:43.637 1969-12-31 15:59:58.174 NULL 1969-12-31 15:59:58.174 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.64 1969-12-31 15:59:58.174 1319-02-02 16:31:57.778 1969-12-31 23:59:43.64 1969-12-31 15:59:58.174 NULL 1969-12-31 15:59:58.174 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.661 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:43.661 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.676 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:43.676 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.705 1969-12-31 15:59:58.456 1319-02-02 16:31:57.778 1969-12-31 23:59:43.705 1969-12-31 15:59:58.456 NULL 1969-12-31 15:59:58.456 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.709 1969-12-31 15:59:58.456 1319-02-02 16:31:57.778 1969-12-31 23:59:43.709 1969-12-31 15:59:58.456 NULL 1969-12-31 15:59:58.456 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.72 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:43.72 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.721 1969-12-31 15:59:58.456 1319-02-02 16:31:57.778 1969-12-31 23:59:43.721 1969-12-31 15:59:58.456 NULL 1969-12-31 15:59:58.456 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.749 1969-12-31 15:59:58.174 1319-02-02 16:31:57.778 1969-12-31 23:59:43.749 1969-12-31 15:59:58.174 NULL 1969-12-31 15:59:58.174 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.771 1969-12-31 15:59:58.456 1319-02-02 16:31:57.778 1969-12-31 23:59:43.771 1969-12-31 15:59:58.456 NULL 1969-12-31 15:59:58.456 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.773 1969-12-31 
15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:43.773 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.782 1969-12-31 15:59:58.174 1319-02-02 16:31:57.778 1969-12-31 23:59:43.782 1969-12-31 15:59:58.174 NULL 1969-12-31 15:59:58.174 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.783 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:43.783 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.807 1969-12-31 15:59:58.174 1319-02-02 16:31:57.778 1969-12-31 23:59:43.807 1969-12-31 15:59:58.174 NULL 1969-12-31 15:59:58.174 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.82 1969-12-31 15:59:58.174 1319-02-02 16:31:57.778 1969-12-31 23:59:43.82 1969-12-31 15:59:58.174 NULL 1969-12-31 15:59:58.174 +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.619 1969-12-31 16:00:14.793 1969-12-31 23:59:43.619 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.619 1969-12-31 23:59:43.619 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.627 1969-12-31 16:00:03.679 1969-12-31 23:59:43.627 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.627 1969-12-31 23:59:43.627 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.631 1969-12-31 16:00:06.612 1969-12-31 23:59:43.631 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.631 1969-12-31 23:59:43.631 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.642 1969-12-31 16:00:04.424 1969-12-31 23:59:43.642 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.642 1969-12-31 23:59:43.642 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.643 1969-12-31 16:00:11.764 1969-12-31 23:59:43.643 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.643 1969-12-31 23:59:43.643 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.645 1969-12-31 16:00:00.077 1969-12-31 23:59:43.645 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.645 1969-12-31 23:59:43.645 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.661 1969-12-31 15:59:58.732 1969-12-31 23:59:43.661 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.661 1969-12-31 23:59:43.661 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.689 1969-12-31 15:59:46.848 1969-12-31 23:59:43.689 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.689 1969-12-31 23:59:43.689 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.695 1969-12-31 16:00:06.867 1969-12-31 23:59:43.695 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.695 1969-12-31 23:59:43.695 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.707 1969-12-31 15:59:56.965 1969-12-31 23:59:43.707 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.707 1969-12-31 23:59:43.707 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.71 1969-12-31 16:00:00.687 1969-12-31 23:59:43.71 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.71 1969-12-31 23:59:43.71 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.723 1969-12-31 16:00:03.375 1969-12-31 23:59:43.723 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.723 1969-12-31 23:59:43.723 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.745 1969-12-31 16:00:04.052 1969-12-31 23:59:43.745 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.745 1969-12-31 23:59:43.745 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.764 1969-12-31 16:00:10.52 1969-12-31 23:59:43.764 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.764 1969-12-31 23:59:43.764 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.775 1969-12-31 15:59:48.003 1969-12-31 23:59:43.775 2000-12-18 
08:42:30.0005 1969-12-31 23:59:43.775 1969-12-31 23:59:43.775 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.779 1969-12-31 15:59:53.274 1969-12-31 23:59:43.779 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.779 1969-12-31 23:59:43.779 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.785 1969-12-31 16:00:14.096 1969-12-31 23:59:43.785 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.785 1969-12-31 23:59:43.785 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.792 1969-12-31 15:59:52.041 1969-12-31 23:59:43.792 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.792 1969-12-31 23:59:43.792 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.793 1969-12-31 15:59:56.316 1969-12-31 23:59:43.793 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.793 1969-12-31 23:59:43.793 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.811 1969-12-31 16:00:00.479 1969-12-31 23:59:43.811 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.811 1969-12-31 23:59:43.811 NULL +-29 1969 12 31 31 1 23 59 30 NULL 1969-12-31 23:59:30.929 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:30.929 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-29 1969 12 31 31 1 23 59 30 NULL 1969-12-31 23:59:30.929 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:30.929 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-29 1969 12 31 31 1 23 59 30 NULL 1969-12-31 23:59:30.929 1969-12-31 15:59:58.174 1319-02-02 16:31:57.778 1969-12-31 23:59:30.929 1969-12-31 15:59:58.174 NULL 1969-12-31 15:59:58.174 +-29 1969 12 31 31 1 23 59 30 NULL 1969-12-31 23:59:30.929 1969-12-31 15:59:58.456 1319-02-02 16:31:57.778 1969-12-31 23:59:30.929 1969-12-31 15:59:58.456 NULL 1969-12-31 15:59:58.456 -45479202281 528 10 25 25 43 8 15 18 true 0528-10-27 08:15:18.941718273 NULL 0528-10-27 08:15:18.941718273 2000-12-18 08:42:30.0005 0528-10-27 08:15:18.941718273 0528-10-27 08:15:18.941718273 NULL 1632453512 2021 9 24 24 38 3 18 32 NULL 2021-09-24 03:18:32.4 1974-10-04 17:21:03.989 1319-02-02 16:31:57.778 2021-09-24 03:18:32.4 1974-10-04 17:21:03.989 NULL 1974-10-04 17:21:03.989 1632453512 2021 9 24 24 38 3 18 32 false 2021-09-24 03:18:32.4 1999-10-03 16:59:10.396903939 1319-02-02 16:31:57.778 2021-09-24 03:18:32.4 1999-10-03 16:59:10.396903939 NULL 1999-10-03 16:59:10.396903939 @@ -378,47 +418,7 @@ POSTHOOK: Input: default@alltypesorc_string 163809583224 7160 12 2 2 48 6 0 24 NULL 7160-12-02 06:00:24.81200852 NULL 1319-02-02 16:31:57.778 7160-12-02 06:00:24.81200852 NULL NULL NULL 490699811 1985 7 20 20 29 9 30 11 true 1985-07-20 09:30:11 1319-02-02 16:31:57.778 1985-07-20 09:30:11 2000-12-18 08:42:30.0005 1985-07-20 09:30:11 1985-07-20 09:30:11 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL 1319-02-02 16:31:57.778 NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 15:59:47.183 1319-02-02 16:31:57.778 NULL 1969-12-31 15:59:47.183 NULL 1969-12-31 15:59:47.183 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 15:59:52.843 1319-02-02 16:31:57.778 NULL 1969-12-31 15:59:52.843 NULL 1969-12-31 15:59:52.843 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 15:59:53.087 1319-02-02 16:31:57.778 NULL 1969-12-31 15:59:53.087 NULL 1969-12-31 15:59:53.087 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 15:59:53.55 1319-02-02 16:31:57.778 NULL 1969-12-31 15:59:53.55 NULL 1969-12-31 15:59:53.55 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 15:59:54.042 1319-02-02 16:31:57.778 
NULL 1969-12-31 15:59:54.042 NULL 1969-12-31 15:59:54.042 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 15:59:54.686 1319-02-02 16:31:57.778 NULL 1969-12-31 15:59:54.686 NULL 1969-12-31 15:59:54.686 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 15:59:58.459 1319-02-02 16:31:57.778 NULL 1969-12-31 15:59:58.459 NULL 1969-12-31 15:59:58.459 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:00.889 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:00.889 NULL 1969-12-31 16:00:00.889 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:01.258 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:01.258 NULL 1969-12-31 16:00:01.258 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:05.698 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:05.698 NULL 1969-12-31 16:00:05.698 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:08.602 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:08.602 NULL 1969-12-31 16:00:08.602 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:14.214 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:14.214 NULL 1969-12-31 16:00:14.214 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:15.466 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:15.466 NULL 1969-12-31 16:00:15.466 NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL NULL 1319-02-02 16:31:57.778 NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:46.123 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:49.989 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:51.119 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:52.961 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:52.967 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:53.593 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:53.641 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:55.407 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:55.439 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:56.031 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:57.719 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:58.636 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:00.176 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:00.423 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:00.477 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:00.93 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:01.839 NULL 
2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:02.13 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:03.151 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:03.756 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:06.134 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:07.209 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:10.361 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:11.525 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:13.589 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:13.839 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:15.601 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 2024-11-11 16:42:41.101 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL NULL NULL 2000-12-18 08:42:30.0005 NULL NULL NULL PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT @@ -466,7 +466,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_string - Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -477,7 +477,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [5, 7, 8, 9, 6, 11, 10, 13, 14] selectExpressions: VectorUDFUnixTimeStampString(col 2:string) -> 5:bigint, VectorUDFYearDate(col 6, field YEAR)(children: CastStringToDate(col 2:string) -> 6:date) -> 7:int, VectorUDFMonthDate(col 6, field MONTH)(children: CastStringToDate(col 2:string) -> 6:date) -> 8:int, VectorUDFDayOfMonthDate(col 6, field DAY_OF_MONTH)(children: CastStringToDate(col 2:string) -> 6:date) -> 9:int, VectorUDFDayOfMonthString(col 2:string, fieldStart 8, fieldLength 2) -> 6:int, VectorUDFWeekOfYearDate(col 10, field WEEK_OF_YEAR)(children: CastStringToDate(col 2:string) -> 10:date) -> 11:int, VectorUDFHourTimestamp(col 12:timestamp, field HOUR_OF_DAY)(children: CastStringToTimestamp(col 2:string) -> 12:timestamp) -> 10:int, VectorUDFMinuteTimestamp(col 12:timestamp, field MINUTE)(children: CastStringToTimestamp(col 2:string) -> 12:timestamp) -> 13:int, VectorUDFSecondTimestamp(col 12:timestamp, field SECOND)(children: CastStringToTimestamp(col 2:string) -> 12:timestamp) -> 14:int - Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: bigint) sort order: + @@ -485,7 +485,7 @@ STAGE PLANS: className: VectorReduceSinkObjectHashOperator native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS 
true - Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int) Execution mode: vectorized Map Vectorization: @@ -513,13 +513,13 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8] - Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -561,7 +561,47 @@ ORDER BY c1 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc_string #### A masked pattern was here #### +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 -2736272726 1883 4 17 17 16 4 14 34 +-30 1969 12 31 31 1 23 59 30 +-30 1969 12 31 31 1 23 59 30 +-30 1969 12 31 31 1 23 59 30 +-30 1969 12 31 31 1 23 59 30 -62018199211 4 9 24 22 39 18 26 29 1365554626 2013 4 10 10 15 0 43 46 206730996125 8521 1 16 16 3 20 42 5 @@ -573,46 +613,6 @@ NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL 
NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT to_unix_timestamp(ctimestamp1) = to_unix_timestamp(stimestamp1) AS c1, year(ctimestamp1) = year(stimestamp1), @@ -658,7 +658,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_string - Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -669,7 +669,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [7, 6, 8, 9, 11, 10, 14, 15, 16] selectExpressions: LongColEqualLongColumn(col 5:bigint, col 6:bigint)(children: VectorUDFUnixTimeStampTimestamp(col 1:timestamp) -> 5:bigint, VectorUDFUnixTimeStampString(col 2:string) -> 6:bigint) -> 7:boolean, LongColEqualLongColumn(col 5:int, col 8:int)(children: VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 5:int, VectorUDFYearDate(col 6, field YEAR)(children: CastStringToDate(col 2:string) -> 6:date) -> 8:int) -> 6:boolean, LongColEqualLongColumn(col 5:int, col 9:int)(children: VectorUDFMonthTimestamp(col 1:timestamp, field MONTH) -> 5:int, VectorUDFMonthDate(col 8, field MONTH)(children: CastStringToDate(col 2:string) -> 8:date) -> 9:int) -> 8:boolean, LongColEqualLongColumn(col 5:int, col 10:int)(children: VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 5:int, VectorUDFDayOfMonthDate(col 9, field DAY_OF_MONTH)(children: CastStringToDate(col 2:string) -> 9:date) -> 10:int) -> 9:boolean, LongColEqualLongColumn(col 5:int, col 10:int)(children: VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 5:int, VectorUDFDayOfMonthString(col 2:string, fieldStart 8, fieldLength 2) -> 10:int) -> 11:boolean, LongColEqualLongColumn(col 5:int, col 12:int)(children: VectorUDFWeekOfYearTimestamp(col 1:timestamp, field WEEK_OF_YEAR) -> 5:int, VectorUDFWeekOfYearDate(col 10, field WEEK_OF_YEAR)(children: CastStringToDate(col 2:string) -> 10:date) -> 12:int) -> 10:boolean, LongColEqualLongColumn(col 5:int, col 12:int)(children: VectorUDFHourTimestamp(col 1:timestamp, field HOUR_OF_DAY) -> 5:int, VectorUDFHourTimestamp(col 13:timestamp, field HOUR_OF_DAY)(children: CastStringToTimestamp(col 2:string) -> 13:timestamp) -> 12:int) -> 14:boolean, LongColEqualLongColumn(col 5:int, col 12:int)(children: VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 5:int, VectorUDFMinuteTimestamp(col 13:timestamp, field MINUTE)(children: 
CastStringToTimestamp(col 2:string) -> 13:timestamp) -> 12:int) -> 15:boolean, LongColEqualLongColumn(col 5:int, col 12:int)(children: VectorUDFSecondTimestamp(col 1:timestamp, field SECOND) -> 5:int, VectorUDFSecondTimestamp(col 13:timestamp, field SECOND)(children: CastStringToTimestamp(col 2:string) -> 13:timestamp) -> 12:int) -> 16:boolean - Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: boolean) sort order: + @@ -677,7 +677,7 @@ STAGE PLANS: className: VectorReduceSinkObjectHashOperator native: true nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true - Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: boolean), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col6 (type: boolean), _col7 (type: boolean), _col8 (type: boolean) Execution mode: vectorized Map Vectorization: @@ -705,13 +705,13 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8] - Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -761,50 +761,50 @@ NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL 
NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL false false false false false false false false false false false false false false false false false false false false false false false false false false false false false false false false false false false false +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT to_unix_timestamp(stimestamp1) AS c1, year(stimestamp1), @@ -981,7 +981,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_string - Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -991,7 +991,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [1] - Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: min(ctimestamp1), max(ctimestamp1), count(ctimestamp1), count() Group By Vectorization: @@ -1078,7 +1078,7 @@ FROM alltypesorc_string POSTHOOK: type: QUERY 
POSTHOOK: Input: default@alltypesorc_string #### A masked pattern was here #### -0528-10-27 08:15:18.941718273 7160-12-02 06:00:24.81200852 8 52 +0528-10-27 08:15:18.941718273 7160-12-02 06:00:24.81200852 48 52 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT round(sum(ctimestamp1), 3) FROM alltypesorc_string @@ -1106,7 +1106,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_string - Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -1116,7 +1116,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [1] - Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(ctimestamp1) Group By Vectorization: @@ -1206,7 +1206,7 @@ FROM alltypesorc_string POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc_string #### A masked pattern was here #### -2.89160478029166E11 +2.891604773267E11 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT round(avg(ctimestamp1), 0), variance(ctimestamp1) between 8.97077295279421E19 and 8.97077295279422E19, @@ -1248,7 +1248,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_string - Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -1259,7 +1259,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [1, 5, 8] selectExpressions: CastTimestampToDouble(col 1:timestamp) -> 5:double, DoubleColMultiplyDoubleColumn(col 6:double, col 7:double)(children: CastTimestampToDouble(col 1:timestamp) -> 6:double, CastTimestampToDouble(col 1:timestamp) -> 7:double) -> 8:double - Statistics: Num rows: 52 Data size: 3219 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col0), count(_col0), sum(_col2), sum(_col1) Group By Vectorization: @@ -1363,4 +1363,4 @@ FROM alltypesorc_string POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc_string #### A masked pattern was here #### -3.6145059754E10 false false false 7.5245178084814E10 7.5245178084814E10 7.5245178084814E10 8.0440478971476E10 +6.024176611E9 false false false 3.3542405863247E10 3.3542405863247E10 3.3542405863247E10 3.3897361841912E10 diff --git ql/src/test/results/clientpositive/spark/windowing.q.out ql/src/test/results/clientpositive/spark/windowing.q.out index 7967d05cd2..e2295c166a 100644 --- ql/src/test/results/clientpositive/spark/windowing.q.out +++ ql/src/test/results/clientpositive/spark/windowing.q.out @@ -1856,7 +1856,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/stat_estimate_drill.q.out ql/src/test/results/clientpositive/stat_estimate_drill.q.out index cb025223a4..84bf86fd00 100644 --- ql/src/test/results/clientpositive/stat_estimate_drill.q.out +++ ql/src/test/results/clientpositive/stat_estimate_drill.q.out @@ -84,16 +84,16 @@ STAGE PLANS: TableScan alias: t3 filterExpr: (b) IN (2, 3) (type: boolean) - Statistics: Num rows: 
1000/1000 Data size: 8000 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1000/1 Data size: 8000 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (b) IN (2, 3) (type: boolean) - Statistics: Num rows: 200/200 Data size: 1600 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 200/1 Data size: 1600 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(a) keys: b (type: int) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 2/2 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 2/1 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) sort order: + @@ -149,16 +149,16 @@ STAGE PLANS: TableScan alias: t3 filterExpr: (a) IN (1, 2) (type: boolean) - Statistics: Num rows: 1000/1000 Data size: 8000 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1000/1 Data size: 8000 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (a) IN (1, 2) (type: boolean) - Statistics: Num rows: 200/200 Data size: 1600 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 200/1 Data size: 1600 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(a) keys: b (type: int) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10/10 Data size: 120 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 10/1 Data size: 120 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) sort order: + @@ -214,16 +214,16 @@ STAGE PLANS: TableScan alias: t3 filterExpr: ((a = 1) or ((a = 2) and (b = 3))) (type: boolean) - Statistics: Num rows: 1000/1000 Data size: 8000 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1000/1 Data size: 8000 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (((a = 2) and (b = 3)) or (a = 1)) (type: boolean) - Statistics: Num rows: 110/110 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 110/1 Data size: 880 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(a) keys: b (type: int) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10/10 Data size: 120 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 10/1 Data size: 120 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) sort order: + @@ -279,20 +279,20 @@ STAGE PLANS: TableScan alias: t3 filterExpr: (a = 1) (type: boolean) - Statistics: Num rows: 1000/1000 Data size: 8000 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1000/1 Data size: 8000 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (a = 1) (type: boolean) - Statistics: Num rows: 100/100 Data size: 800 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 100/1 Data size: 800 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: b (type: int) outputColumnNames: _col0 - Statistics: Num rows: 100/100 Data size: 800 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 100/1 Data size: 800 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(1) keys: _col0 (type: int) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10/10 Data size: 120 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 10/1 Data size: 120 Basic 
stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) sort order: + @@ -348,12 +348,12 @@ STAGE PLANS: TableScan alias: t3 filterExpr: ((a = 1) and (b = 2)) (type: boolean) - Statistics: Num rows: 1000/1000 Data size: 8000 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1000/1 Data size: 8000 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ((a = 1) and (b = 2)) (type: boolean) - Statistics: Num rows: 10/10 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 10/1 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 10/10 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 10/1 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(1) keys: true (type: boolean) @@ -415,7 +415,7 @@ STAGE PLANS: TableScan alias: t3 filterExpr: ((a = 1) and (b = 2) and (c = 3)) (type: boolean) - Statistics: Num rows: 1000/1000 Data size: 12000 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1000/1 Data size: 12000 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ((a = 1) and (b = 2) and (c = 3)) (type: boolean) Statistics: Num rows: 1/1 Data size: 12 Basic stats: COMPLETE Column stats: COMPLETE @@ -482,16 +482,16 @@ STAGE PLANS: TableScan alias: t3 filterExpr: (struct(a,b)) IN (const struct(1,2), const struct(2,3), const struct(3,4)) (type: boolean) - Statistics: Num rows: 1000/1000 Data size: 8000 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1000/1 Data size: 8000 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (struct(a,b)) IN (const struct(1,2), const struct(2,3), const struct(3,4)) (type: boolean) - Statistics: Num rows: 30/30 Data size: 240 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 30/1 Data size: 240 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(a) keys: b (type: int) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10/3 Data size: 120 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 10/1 Data size: 120 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) sort order: + diff --git ql/src/test/results/clientpositive/stat_estimate_related_col.q.out ql/src/test/results/clientpositive/stat_estimate_related_col.q.out index 2a2b7a8239..ac8b93072f 100644 --- ql/src/test/results/clientpositive/stat_estimate_related_col.q.out +++ ql/src/test/results/clientpositive/stat_estimate_related_col.q.out @@ -89,16 +89,16 @@ STAGE PLANS: TableScan alias: t8 filterExpr: (b) IN (2, 3) (type: boolean) - Statistics: Num rows: 40/40 Data size: 320 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 40/1 Data size: 320 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (b) IN (2, 3) (type: boolean) - Statistics: Num rows: 16/16 Data size: 128 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 16/1 Data size: 128 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(a) keys: b (type: int) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 2/2 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 2/1 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator key expressions: _col0 (type: int) sort order: + @@ 
-154,14 +154,14 @@ STAGE PLANS: TableScan alias: t8 filterExpr: (b = 2) (type: boolean) - Statistics: Num rows: 40/40 Data size: 320 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 40/1 Data size: 320 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (b = 2) (type: boolean) - Statistics: Num rows: 8/8 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8/1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: a (type: int) outputColumnNames: a - Statistics: Num rows: 8/8 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8/1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(a) keys: 2 (type: int) @@ -223,7 +223,7 @@ STAGE PLANS: TableScan alias: t1 filterExpr: ((2 = b) and (b = 2)) (type: boolean) - Statistics: Num rows: 5/5 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 5/1 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ((2 = b) and (b = 2)) (type: boolean) Statistics: Num rows: 1/1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE @@ -292,7 +292,7 @@ STAGE PLANS: TableScan alias: t1 filterExpr: ((b) IN (2, 3) and (b = 2)) (type: boolean) - Statistics: Num rows: 5/5 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 5/1 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ((b = 2) and (b) IN (2, 3)) (type: boolean) Statistics: Num rows: 1/1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE @@ -361,14 +361,14 @@ STAGE PLANS: TableScan alias: t8 filterExpr: ((b) IN (2, 3) and (b = 2)) (type: boolean) - Statistics: Num rows: 40/40 Data size: 320 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 40/1 Data size: 320 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ((b = 2) and (b) IN (2, 3)) (type: boolean) - Statistics: Num rows: 8/8 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8/1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: a (type: int) outputColumnNames: a - Statistics: Num rows: 8/8 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8/1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(a) keys: 2 (type: int) @@ -524,14 +524,14 @@ STAGE PLANS: TableScan alias: t8 filterExpr: ((b) IN (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50) and (b = 2) and (2 = b)) (type: boolean) - Statistics: Num rows: 40/40 Data size: 320 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 40/1 Data size: 320 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: ((2 = b) and (b = 2) and (b) IN (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50)) (type: boolean) - Statistics: Num rows: 8/8 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8/1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: a (type: int) outputColumnNames: a - Statistics: Num rows: 8/8 Data size: 64 Basic stats: COMPLETE Column 
stats: COMPLETE + Statistics: Num rows: 8/1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(a) keys: 2 (type: int) @@ -593,14 +593,14 @@ STAGE PLANS: TableScan alias: t8 filterExpr: ((b = 2) and ((b = 1) or (b = 2))) (type: boolean) - Statistics: Num rows: 40/40 Data size: 320 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 40/1 Data size: 320 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (((b = 1) or (b = 2)) and (b = 2)) (type: boolean) - Statistics: Num rows: 8/8 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8/1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: a (type: int) outputColumnNames: a - Statistics: Num rows: 8/8 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 8/1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(a) keys: 2 (type: int) @@ -662,7 +662,7 @@ STAGE PLANS: TableScan alias: t8 filterExpr: ((b = 2) and ((b = 1) or (b = 2)) and ((b = 1) or (b = 3))) (type: boolean) - Statistics: Num rows: 40/40 Data size: 320 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 40/1 Data size: 320 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (((b = 1) or (b = 2)) and ((b = 1) or (b = 3)) and (b = 2)) (type: boolean) Statistics: Num rows: 8/0 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE @@ -747,7 +747,7 @@ STAGE PLANS: TableScan alias: t8 filterExpr: ((b = 2) and ((b = 1) or (b = 2)) and (a = 3) and ((a = 3) or (a = 4))) (type: boolean) - Statistics: Num rows: 40/40 Data size: 320 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 40/1 Data size: 320 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (((a = 3) or (a = 4)) and ((b = 1) or (b = 2)) and (a = 3) and (b = 2)) (type: boolean) Statistics: Num rows: 2/0 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE diff --git ql/src/test/results/clientpositive/subquery_unqualcolumnrefs.q.out ql/src/test/results/clientpositive/subquery_unqualcolumnrefs.q.out index c65b04c47f..dbc6f5d008 100644 --- ql/src/test/results/clientpositive/subquery_unqualcolumnrefs.q.out +++ ql/src/test/results/clientpositive/subquery_unqualcolumnrefs.q.out @@ -324,7 +324,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -493,7 +493,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col2 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out index 634b4ea7a3..192f57761b 100644 --- ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out +++ ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out @@ -97,7 +97,6 @@ POSTHOOK: query: select distinct si, si%4 from over10k_n9 order by si POSTHOOK: type: QUERY POSTHOOK: Input: default@over10k_n9 POSTHOOK: Output: hdfs://### HDFS PATH ### -NULL NULL 256 0 257 1 258 2 @@ -350,6 +349,7 @@ NULL NULL 509 1 510 2 511 3 +NULL NULL PREHOOK: query: insert into over10k_orc_bucketed_n0 select * from over10k_n9 
PREHOOK: type: QUERY PREHOOK: Input: default@over10k_n9 diff --git ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out index dee97dbe37..9ca64531b6 100644 --- ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out +++ ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out @@ -353,9 +353,9 @@ Stage-3 Map 1 vectorized File Output Operator [FS_4] table:{"name:":"default.src_autho_test_n4"} - Select Operator [SEL_3] (rows=500/500 width=178) + Select Operator [SEL_3] (rows=500/1 width=178) Output:["_col0","_col1"] - TableScan [TS_0] (rows=500/500 width=178) + TableScan [TS_0] (rows=500/1 width=178) default@src,src,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"] Stage-0 Move Operator @@ -612,15 +612,15 @@ Stage-0 Stage-1 Reducer 2 vectorized File Output Operator [FS_10] - Limit [LIM_9] (rows=5/5 width=178) + Limit [LIM_9] (rows=5/3 width=178) Number of rows:5 - Select Operator [SEL_8] (rows=500/5 width=178) + Select Operator [SEL_8] (rows=500/3 width=178) Output:["_col0","_col1"] <-Map 1 [SIMPLE_EDGE] vectorized SHUFFLE [RS_7] - Select Operator [SEL_6] (rows=500/500 width=178) + Select Operator [SEL_6] (rows=500/1 width=178) Output:["_col0","_col1"] - TableScan [TS_0] (rows=500/500 width=178) + TableScan [TS_0] (rows=500/1 width=178) default@src,src,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"] PREHOOK: query: create table orc_merge5_n1 (userid bigint, string1 string, subtype double, decimal1 decimal(38,0), ts timestamp) stored as orc @@ -679,7 +679,7 @@ Stage-3 Output:["_col0","_col1","_col2","_col3","_col4"] Filter Operator [FIL_12] (rows=1/3 width=352) predicate:(userid <= 13L) - TableScan [TS_0] (rows=1/15000 width=352) + TableScan [TS_0] (rows=1/15 width=352) default@orc_merge5_n1,orc_merge5_n1,Tbl:COMPLETE,Col:NONE,Output:["userid","string1","subtype","decimal1","ts"] PARTITION_ONLY_SHUFFLE [RS_16] Select Operator [SEL_15] (rows=1/3 width=352) @@ -847,23 +847,23 @@ Stage-0 Stage-1 Map 2 vectorized File Output Operator [FS_34] - Select Operator [SEL_33] (rows=399/480 width=186) + Select Operator [SEL_33] (rows=399/50 width=186) Output:["_col0","_col1","_col2"] - Map Join Operator [MAPJOIN_32] (rows=399/480 width=186) + Map Join Operator [MAPJOIN_32] (rows=399/50 width=186) BucketMapJoin:true,Conds:RS_29._col0=SEL_31._col0(Inner),HybridGraceHashJoin:true,Output:["_col0","_col1","_col3"] <-Map 1 [CUSTOM_EDGE] vectorized MULTICAST [RS_29] PartitionCols:_col0 - Select Operator [SEL_28] (rows=242/242 width=95) + Select Operator [SEL_28] (rows=242/4 width=95) Output:["_col0","_col1"] - Filter Operator [FIL_27] (rows=242/242 width=95) + Filter Operator [FIL_27] (rows=242/4 width=95) predicate:key is not null - TableScan [TS_0] (rows=242/242 width=95) + TableScan [TS_0] (rows=242/4 width=95) default@tab_n2,a,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"] - <-Select Operator [SEL_31] (rows=500/500 width=95) + <-Select Operator [SEL_31] (rows=500/4 width=95) Output:["_col0","_col1"] - Filter Operator [FIL_30] (rows=500/500 width=95) + Filter Operator [FIL_30] (rows=500/4 width=95) predicate:key is not null - TableScan [TS_3] (rows=500/500 width=95) + TableScan [TS_3] (rows=500/4 width=95) default@tab_part_n3,b,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"] diff --git ql/src/test/results/clientpositive/tez/explainanalyze_4.q.out ql/src/test/results/clientpositive/tez/explainanalyze_4.q.out index 7d060307cb..a6acaf0335 100644 --- ql/src/test/results/clientpositive/tez/explainanalyze_4.q.out +++ 
ql/src/test/results/clientpositive/tez/explainanalyze_4.q.out @@ -338,7 +338,7 @@ Stage-0 <-Reducer 2 [SIMPLE_EDGE] SHUFFLE [RS_10] Map Join Operator [MAPJOIN_27] (rows=1501/10 width=236) - Conds:RS_6.KEY.reducesinkkey0=RS_7.KEY.reducesinkkey0(Inner),HybridGraceHashJoin:true,Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21","_col22","_col23"] + Conds:RS_6.KEY.reducesinkkey0=RS_7.KEY.reducesinkkey0(Inner),DynamicPartitionHashJoin:true,HybridGraceHashJoin:true,Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21","_col22","_col23"] <-Map 4 [CUSTOM_SIMPLE_EDGE] PARTITION_ONLY_SHUFFLE [RS_7] PartitionCols:_col2 @@ -433,7 +433,7 @@ Stage-0 <-Reducer 2 [CUSTOM_SIMPLE_EDGE] PARTITION_ONLY_SHUFFLE [RS_10] Map Join Operator [MAPJOIN_28] (rows=1501/10 width=236) - Conds:RS_6.KEY.reducesinkkey0=RS_7.KEY.reducesinkkey0(Inner),HybridGraceHashJoin:true + Conds:RS_6.KEY.reducesinkkey0=RS_7.KEY.reducesinkkey0(Inner),DynamicPartitionHashJoin:true,HybridGraceHashJoin:true <-Map 4 [CUSTOM_SIMPLE_EDGE] PARTITION_ONLY_SHUFFLE [RS_7] PartitionCols:_col0 @@ -531,7 +531,7 @@ Stage-0 SHUFFLE [RS_10] PartitionCols:_col0 Map Join Operator [MAPJOIN_30] (rows=1501/10 width=236) - Conds:RS_6.KEY.reducesinkkey0=RS_7.KEY.reducesinkkey0(Inner),HybridGraceHashJoin:true,Output:["_col0"] + Conds:RS_6.KEY.reducesinkkey0=RS_7.KEY.reducesinkkey0(Inner),DynamicPartitionHashJoin:true,HybridGraceHashJoin:true,Output:["_col0"] <-Map 5 [CUSTOM_SIMPLE_EDGE] PARTITION_ONLY_SHUFFLE [RS_7] PartitionCols:_col0 diff --git ql/src/test/results/clientpositive/udtf_json_tuple.q.out ql/src/test/results/clientpositive/udtf_json_tuple.q.out index d500c14c64..45a8f464dd 100644 --- ql/src/test/results/clientpositive/udtf_json_tuple.q.out +++ ql/src/test/results/clientpositive/udtf_json_tuple.q.out @@ -189,12 +189,12 @@ POSTHOOK: query: select json_tuple(a.jstring, 'f1', 'f2', 'f3', 'f4', 'f5') as ( POSTHOOK: type: QUERY POSTHOOK: Input: default@json_t #### A masked pattern was here #### -NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL value1 value2 3 NULL 5.23 value12 2 value3 4.01 NULL value13 2 value33 value44 5.01 +NULL NULL NULL NULL NULL +NULL NULL NULL NULL NULL PREHOOK: query: explain select a.key, b.f2, b.f5 from json_t a lateral view json_tuple(a.jstring, 'f1', 'f2', 'f3', 'f4', 'f5') b as f1, f2, f3, f4, f5 order by a.key PREHOOK: type: QUERY @@ -403,9 +403,9 @@ POSTHOOK: query: select f2, count(*) from json_t a lateral view json_tuple(a.jst POSTHOOK: type: QUERY POSTHOOK: Input: default@json_t #### A masked pattern was here #### -NULL 1 2 2 value2 1 +NULL 1 PREHOOK: query: CREATE TABLE dest1_n65(c1 STRING) STORED AS RCFILE PREHOOK: type: CREATETABLE PREHOOK: Output: database:default diff --git ql/src/test/results/clientpositive/udtf_parse_url_tuple.q.out ql/src/test/results/clientpositive/udtf_parse_url_tuple.q.out index 51e23e53ca..c122bf686c 100644 --- ql/src/test/results/clientpositive/udtf_parse_url_tuple.q.out +++ ql/src/test/results/clientpositive/udtf_parse_url_tuple.q.out @@ -208,12 +208,12 @@ POSTHOOK: query: select parse_url_tuple(a.fullurl, 'HOST', 'PATH', 'QUERY', 'REF POSTHOOK: type: QUERY POSTHOOK: Input: default@url_t #### A masked pattern was here #### -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL 
NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL facebook.com /path1/p.php k1=v1&k2=v2 Ref1 http /path1/p.php?k1=v1&k2=v2 facebook.com NULL v1 sites.google.com /a/example.com/site/page NULL NULL ftp /a/example.com/site/page sites.google.com NULL NULL www.socs.uts.edu.au /MosaicDocs-old/url-primer.html k1=tps chapter1 https /MosaicDocs-old/url-primer.html?k1=tps www.socs.uts.edu.au:80 NULL tps +NULL NULL NULL NULL NULL NULL NULL NULL NULL +NULL NULL NULL NULL NULL NULL NULL NULL NULL +NULL NULL NULL NULL NULL NULL NULL NULL NULL PREHOOK: query: explain select a.key, b.ho, b.qu, b.qk1, b.err1, b.err2, b.err3 from url_t a lateral view parse_url_tuple(a.fullurl, 'HOST', 'PATH', 'QUERY', 'REF', 'PROTOCOL', 'FILE', 'AUTHORITY', 'USERINFO', 'QUERY:k1', 'host', 'query', 'QUERY:nonExistCol') b as ho, pa, qu, re, pr, fi, au, us, qk1, err1, err2, err3 order by a.key PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/union_null.q.out ql/src/test/results/clientpositive/union_null.q.out index 00bd9d93a1..696641cbd8 100644 --- ql/src/test/results/clientpositive/union_null.q.out +++ ql/src/test/results/clientpositive/union_null.q.out @@ -6,16 +6,16 @@ POSTHOOK: query: select x from (select * from (select value as x from src order POSTHOOK: type: QUERY POSTHOOK: Input: default@src #### A masked pattern was here #### -val_0 -val_0 -val_0 -val_10 -val_100 NULL NULL NULL NULL NULL +val_0 +val_0 +val_0 +val_10 +val_100 PREHOOK: query: select x from (select * from (select value as x from src order by x limit 5)a union all select * from (select cast(NULL as string) as x from src limit 5)b )a PREHOOK: type: QUERY PREHOOK: Input: default@src @@ -82,5 +82,5 @@ POSTHOOK: query: select null as c1 UNION ALL select 1 as c1 POSTHOOK: type: QUERY POSTHOOK: Input: _dummy_database@_dummy_table #### A masked pattern was here #### -NULL 1 +NULL diff --git ql/src/test/results/clientpositive/union_offcbo.q.out ql/src/test/results/clientpositive/union_offcbo.q.out index 94d197f952..90acc1988e 100644 --- ql/src/test/results/clientpositive/union_offcbo.q.out +++ ql/src/test/results/clientpositive/union_offcbo.q.out @@ -281,7 +281,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col8 (type: string) 1 _col8 (type: string) @@ -373,7 +373,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col8 (type: string) 1 _col8 (type: string) @@ -969,7 +969,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col8 (type: string) 1 _col8 (type: string) @@ -1075,7 +1075,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col8 (type: string) 1 _col8 (type: string) @@ -1322,7 +1322,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col8 (type: string) 1 _col8 (type: string) @@ -1426,7 +1426,7 @@ STAGE PLANS: Reduce Operator Tree: Join Operator condition map: - Outer Join 0 to 1 + Full Outer Join 0 to 1 keys: 0 _col8 (type: string) 1 _col8 (type: string) diff --git ql/src/test/results/clientpositive/union_ppr.q.out ql/src/test/results/clientpositive/union_ppr.q.out index 2d24bc9e09..b6c46427fd 100644 --- ql/src/test/results/clientpositive/union_ppr.q.out +++ ql/src/test/results/clientpositive/union_ppr.q.out @@ -45,7 +45,7 @@ STAGE PLANS: Statistics: Num 
rows: 666 Data size: 7074 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string), '2008-04-08' (type: string), _col3 (type: string) - null sort order: aaaa + null sort order: zzzz sort order: ++++ Statistics: Num rows: 666 Data size: 7074 Basic stats: COMPLETE Column stats: NONE tag: -1 @@ -71,7 +71,7 @@ STAGE PLANS: Statistics: Num rows: 666 Data size: 7074 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string), '2008-04-08' (type: string), _col3 (type: string) - null sort order: aaaa + null sort order: zzzz sort order: ++++ Statistics: Num rows: 666 Data size: 7074 Basic stats: COMPLETE Column stats: NONE tag: -1 diff --git ql/src/test/results/clientpositive/union_remove_6_subq.q.out ql/src/test/results/clientpositive/union_remove_6_subq.q.out index daf05f225a..80b8bbffed 100644 --- ql/src/test/results/clientpositive/union_remove_6_subq.q.out +++ ql/src/test/results/clientpositive/union_remove_6_subq.q.out @@ -525,7 +525,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col0 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/update_all_partitioned.q.out ql/src/test/results/clientpositive/update_all_partitioned.q.out index 4ee1e2e4cf..78c02ceea2 100644 --- ql/src/test/results/clientpositive/update_all_partitioned.q.out +++ ql/src/test/results/clientpositive/update_all_partitioned.q.out @@ -53,11 +53,11 @@ POSTHOOK: Input: default@acid_uap@ds=tomorrow 762 BLoMwUJ51ns6pd tomorrow 762 a10E76jX35YwquKCTA tomorrow 762 q5y2Vy1 tomorrow -6981 NULL tomorrow 6981 1FNNhmiFLGw425NA13g tomorrow 6981 4KhrrQ0nJ7bMNTvhSCA tomorrow 6981 K630vaVf tomorrow 6981 Y5x3JuI3M8jngv5N tomorrow +6981 YdG61y00526u5 tomorrow PREHOOK: query: update acid_uap set b = 'fred' PREHOOK: type: QUERY PREHOOK: Input: default@acid_uap diff --git ql/src/test/results/clientpositive/update_tmp_table.q.out ql/src/test/results/clientpositive/update_tmp_table.q.out index 446a3797b8..746fb9f82e 100644 --- ql/src/test/results/clientpositive/update_tmp_table.q.out +++ ql/src/test/results/clientpositive/update_tmp_table.q.out @@ -50,7 +50,6 @@ POSTHOOK: query: select * from acid_utt order by a POSTHOOK: type: QUERY POSTHOOK: Input: default@acid_utt #### A masked pattern was here #### -NULL 0ruyd6Y50JpdGRf6HqD -1073279343 oj1YrV5Wa -1073051226 A34p7oRr2WvUJNf -1072910839 0iqrc5 @@ -60,3 +59,4 @@ NULL 0ruyd6Y50JpdGRf6HqD -1071363017 Anj0oF -1070551679 iUR3Q -1069736047 k17Am8uPHWk02cEf1jet +NULL 0ruyd6Y50JpdGRf6HqD diff --git ql/src/test/results/clientpositive/update_where_partitioned.q.out ql/src/test/results/clientpositive/update_where_partitioned.q.out index 1834e837c1..ac603b8176 100644 --- ql/src/test/results/clientpositive/update_where_partitioned.q.out +++ ql/src/test/results/clientpositive/update_where_partitioned.q.out @@ -53,11 +53,11 @@ POSTHOOK: Input: default@acid_uwp@ds=tomorrow 762 BLoMwUJ51ns6pd tomorrow 762 a10E76jX35YwquKCTA tomorrow 762 q5y2Vy1 tomorrow -6981 NULL tomorrow 6981 1FNNhmiFLGw425NA13g tomorrow 6981 4KhrrQ0nJ7bMNTvhSCA tomorrow 6981 K630vaVf tomorrow 6981 Y5x3JuI3M8jngv5N tomorrow +6981 YdG61y00526u5 tomorrow PREHOOK: query: update acid_uwp set b = 'fred' where b = 'k17Am8uPHWk02cEf1jet' PREHOOK: type: QUERY PREHOOK: Input: default@acid_uwp @@ -99,8 +99,8 @@ POSTHOOK: Input: default@acid_uwp@ds=tomorrow 762 
BLoMwUJ51ns6pd tomorrow 762 a10E76jX35YwquKCTA tomorrow 762 q5y2Vy1 tomorrow -6981 NULL tomorrow 6981 1FNNhmiFLGw425NA13g tomorrow 6981 4KhrrQ0nJ7bMNTvhSCA tomorrow 6981 K630vaVf tomorrow 6981 Y5x3JuI3M8jngv5N tomorrow +6981 YdG61y00526u5 tomorrow diff --git ql/src/test/results/clientpositive/vector_case_when_2.q.out ql/src/test/results/clientpositive/vector_case_when_2.q.out index b1cb3f26e0..b6acc1dd4c 100644 --- ql/src/test/results/clientpositive/vector_case_when_2.q.out +++ ql/src/test/results/clientpositive/vector_case_when_2.q.out @@ -235,7 +235,6 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamps #### A masked pattern was here #### ctimestamp1 ctimestamp2 ctimestamp2_description ctimestamp2_description_2 ctimestamp2_description_3 field1 field_2 field_3 field_4 field_5 -NULL NULL Unknown NULL NULL NULL 2018-03-08 23:04:59 NULL NULL NULL 0004-09-22 18:26:29.519542222 0004-09-21 16:23:25.519542222 1800s or Earlier Old Old 4 0004-09-22 18:26:29.519542222 26 NULL 0005-09-22 0528-10-27 08:15:18.941718273 0528-10-26 06:12:14.941718273 1800s or Earlier Old Old 528 2018-03-08 23:04:59 15 NULL 0529-10-27 1319-02-02 16:31:57.778 1319-02-01 14:28:53.778 1800s or Earlier Old Old 1319 1319-02-02 16:31:57.778 31 NULL 1320-02-02 @@ -286,6 +285,7 @@ NULL NULL Unknown NULL NULL NULL 2018-03-08 23:04:59 NULL NULL NULL 9075-06-13 16:20:09.218517797 9075-06-12 14:17:05.218517797 Unknown NULL NULL 9075 2018-03-08 23:04:59 20 NULL 9075-06-14 9209-11-11 04:08:58.223768453 9209-11-10 02:05:54.223768453 Unknown NULL NULL 9209 2018-03-08 23:04:59 8 NULL 9209-11-12 9403-01-09 18:12:33.547 9403-01-08 16:09:29.547 Unknown NULL NULL 9403 2018-03-08 23:04:59 12 NULL 9404-01-09 +NULL NULL Unknown NULL NULL NULL 2018-03-08 23:04:59 NULL NULL NULL PREHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT ctimestamp1, @@ -494,7 +494,6 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamps #### A masked pattern was here #### ctimestamp1 ctimestamp2 ctimestamp2_description ctimestamp2_description_2 ctimestamp2_description_3 field1 field_2 field_3 field_4 field_5 -NULL NULL Unknown NULL NULL NULL 2018-03-08 23:04:59 NULL NULL NULL 0004-09-22 18:26:29.519542222 0004-09-21 16:23:25.519542222 1800s or Earlier Old Old 4 0004-09-22 18:26:29.519542222 26 NULL 0005-09-22 0528-10-27 08:15:18.941718273 0528-10-26 06:12:14.941718273 1800s or Earlier Old Old 528 2018-03-08 23:04:59 15 NULL 0529-10-27 1319-02-02 16:31:57.778 1319-02-01 14:28:53.778 1800s or Earlier Old Old 1319 1319-02-02 16:31:57.778 31 NULL 1320-02-02 @@ -545,6 +544,7 @@ NULL NULL Unknown NULL NULL NULL 2018-03-08 23:04:59 NULL NULL NULL 9075-06-13 16:20:09.218517797 9075-06-12 14:17:05.218517797 Unknown NULL NULL 9075 2018-03-08 23:04:59 20 NULL 9075-06-14 9209-11-11 04:08:58.223768453 9209-11-10 02:05:54.223768453 Unknown NULL NULL 9209 2018-03-08 23:04:59 8 NULL 9209-11-12 9403-01-09 18:12:33.547 9403-01-08 16:09:29.547 Unknown NULL NULL 9403 2018-03-08 23:04:59 12 NULL 9404-01-09 +NULL NULL Unknown NULL NULL NULL 2018-03-08 23:04:59 NULL NULL NULL PREHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT ctimestamp1, @@ -753,7 +753,6 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@timestamps #### A masked pattern was here #### ctimestamp1 ctimestamp2 ctimestamp2_description ctimestamp2_description_2 ctimestamp2_description_3 field1 field_2 field_3 field_4 field_5 -NULL NULL Unknown NULL NULL NULL 2018-03-08 23:04:59 NULL NULL NULL 0004-09-22 18:26:29.519542222 0004-09-21 16:23:25.519542222 1800s or Earlier Old Old 4 0004-09-22 18:26:29.519542222 26 NULL 
0005-09-22 0528-10-27 08:15:18.941718273 0528-10-26 06:12:14.941718273 1800s or Earlier Old Old 528 2018-03-08 23:04:59 15 NULL 0529-10-27 1319-02-02 16:31:57.778 1319-02-01 14:28:53.778 1800s or Earlier Old Old 1319 1319-02-02 16:31:57.778 31 NULL 1320-02-02 @@ -804,3 +803,4 @@ NULL NULL Unknown NULL NULL NULL 2018-03-08 23:04:59 NULL NULL NULL 9075-06-13 16:20:09.218517797 9075-06-12 14:17:05.218517797 Unknown NULL NULL 9075 2018-03-08 23:04:59 20 NULL 9075-06-14 9209-11-11 04:08:58.223768453 9209-11-10 02:05:54.223768453 Unknown NULL NULL 9209 2018-03-08 23:04:59 8 NULL 9209-11-12 9403-01-09 18:12:33.547 9403-01-08 16:09:29.547 Unknown NULL NULL 9403 2018-03-08 23:04:59 12 NULL 9404-01-09 +NULL NULL Unknown NULL NULL NULL 2018-03-08 23:04:59 NULL NULL NULL diff --git ql/src/test/results/clientpositive/vector_coalesce.q.out ql/src/test/results/clientpositive/vector_coalesce.q.out index 0d20a0eade..891839bc3a 100644 --- ql/src/test/results/clientpositive/vector_coalesce.q.out +++ ql/src/test/results/clientpositive/vector_coalesce.q.out @@ -73,16 +73,16 @@ LIMIT 10 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL NULL -413196097 -51.0 NULL -413196097 -NULL NULL -413553449 11.0 NULL -413553449 -NULL NULL -457224565 11.0 NULL -457224565 -NULL NULL -591488718 -51.0 NULL -591488718 -NULL NULL -656987896 8.0 NULL -656987896 -NULL NULL -670908417 8.0 NULL -670908417 -NULL NULL -738306196 -51.0 NULL -738306196 -NULL NULL -819152895 8.0 NULL -819152895 -NULL NULL -827212561 8.0 NULL -827212561 -NULL NULL -949587513 11.0 NULL -949587513 +NULL 00MmJs1fiJp37y60mj4Ej8 -698191930 -51.0 NULL 00MmJs1fiJp37y60mj4Ej8 +NULL 00PafC7v 349566607 -51.0 NULL 00PafC7v +NULL 00iT08 284688862 -51.0 NULL 00iT08 +NULL 00k3yt70n476d6UQA -391432229 8.0 NULL 00k3yt70n476d6UQA +NULL 014ILGhXxNY7g02hl0Xw 633097881 11.0 NULL 014ILGhXxNY7g02hl0Xw +NULL 02VRbSC5I 551634127 8.0 NULL 02VRbSC5I +NULL 02k5poW73QsWM 891702124 11.0 NULL 02k5poW73QsWM +NULL 02v8WnLuYDos3Cq -648704945 8.0 NULL 02v8WnLuYDos3Cq +NULL 02vDyIVT752 388584379 11.0 NULL 02vDyIVT752 +NULL 0333uXvwB3ADRa4aP1h 336245146 8.0 NULL 0333uXvwB3ADRa4aP1h PREHOOK: query: EXPLAIN VECTORIZATION ONLY EXPRESSION SELECT ctinyint, cdouble, cint, coalesce(ctinyint+10, (cdouble+log2(cint)), 0) as c FROM alltypesorc WHERE (ctinyint IS NULL) @@ -158,16 +158,16 @@ LIMIT 10 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL NULL -1015272448 0.0 -NULL NULL -609074876 0.0 -NULL NULL -700300206 0.0 -NULL NULL -726473298 0.0 -NULL NULL -738747840 0.0 -NULL NULL -838810013 0.0 -NULL NULL -850295959 0.0 -NULL NULL -886426182 0.0 -NULL NULL -899422227 0.0 -NULL NULL -971543377 0.0 +NULL -16269.0 -378213344 0.0 +NULL -16274.0 -671342269 0.0 +NULL -16296.0 -146635689 0.0 +NULL -16296.0 593429004 -16266.855499800256 +NULL -16300.0 -860437234 0.0 +NULL -16306.0 384405526 -16277.481946165259 +NULL -16307.0 559926362 -16277.939338135451 +NULL -16309.0 -826497289 0.0 +NULL -16310.0 206154150 -16282.380851737113 +NULL -16379.0 -894716315 0.0 PREHOOK: query: EXPLAIN VECTORIZATION ONLY EXPRESSION SELECT cfloat, cbigint, coalesce(cfloat, cbigint, 0) as c FROM alltypesorc WHERE (cfloat IS NULL AND cbigint IS NULL) @@ -324,16 +324,16 @@ LIMIT 10 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL 1969-12-31 15:59:30.929 1969-12-31 15:59:30.929 -NULL 1969-12-31 15:59:30.929 1969-12-31 15:59:30.929 -NULL 1969-12-31 15:59:30.929 1969-12-31 15:59:30.929 
-NULL 1969-12-31 15:59:43.63 1969-12-31 15:59:43.63 -NULL 1969-12-31 15:59:43.658 1969-12-31 15:59:43.658 -NULL 1969-12-31 15:59:43.672 1969-12-31 15:59:43.672 -NULL 1969-12-31 15:59:43.684 1969-12-31 15:59:43.684 -NULL 1969-12-31 15:59:43.703 1969-12-31 15:59:43.703 -NULL 1969-12-31 15:59:43.704 1969-12-31 15:59:43.704 -NULL 1969-12-31 15:59:43.709 1969-12-31 15:59:43.709 +1969-12-31 15:59:30.929 1969-12-31 15:59:55.451 1969-12-31 15:59:30.929 +1969-12-31 15:59:30.929 1969-12-31 15:59:55.451 1969-12-31 15:59:30.929 +1969-12-31 15:59:30.929 1969-12-31 15:59:58.174 1969-12-31 15:59:30.929 +1969-12-31 15:59:30.929 1969-12-31 15:59:58.456 1969-12-31 15:59:30.929 +1969-12-31 15:59:43.619 1969-12-31 16:00:14.793 1969-12-31 15:59:43.619 +1969-12-31 15:59:43.627 1969-12-31 16:00:03.679 1969-12-31 15:59:43.627 +1969-12-31 15:59:43.628 1969-12-31 15:59:55.451 1969-12-31 15:59:43.628 +1969-12-31 15:59:43.631 1969-12-31 16:00:06.612 1969-12-31 15:59:43.631 +1969-12-31 15:59:43.637 1969-12-31 15:59:58.174 1969-12-31 15:59:43.637 +1969-12-31 15:59:43.64 1969-12-31 15:59:58.174 1969-12-31 15:59:43.64 PREHOOK: query: EXPLAIN VECTORIZATION ONLY EXPRESSION SELECT cfloat, cbigint, coalesce(cfloat, cbigint) as c FROM alltypesorc WHERE (cfloat IS NULL AND cbigint IS NULL) diff --git ql/src/test/results/clientpositive/vector_coalesce_4.q.out ql/src/test/results/clientpositive/vector_coalesce_4.q.out index c7c0da62aa..8cb5ffa545 100644 --- ql/src/test/results/clientpositive/vector_coalesce_4.q.out +++ ql/src/test/results/clientpositive/vector_coalesce_4.q.out @@ -116,5 +116,5 @@ POSTHOOK: query: select coalesce(a, b) from coalesce_test order by a, b POSTHOOK: type: QUERY POSTHOOK: Input: default@coalesce_test #### A masked pattern was here #### -NULL 1 +NULL diff --git ql/src/test/results/clientpositive/vector_data_types.q.out ql/src/test/results/clientpositive/vector_data_types.q.out index b9d8ac85fb..a971eed03e 100644 --- ql/src/test/results/clientpositive/vector_data_types.q.out +++ ql/src/test/results/clientpositive/vector_data_types.q.out @@ -169,10 +169,6 @@ POSTHOOK: query: SELECT t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_ POSTHOOK: type: QUERY POSTHOOK: Input: default@over1korc_n1 #### A masked pattern was here #### -NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL 374 65560 4294967516 65.43 22.48 true oscar quirinius 2013-03-01 09:11:58.703316 16.86 mathematics -NULL 409 65536 4294967490 46.97 25.92 false fred miller 2013-03-01 09:11:58.703116 33.45 history -NULL 473 65720 4294967324 80.74 40.6 false holly falkner 2013-03-01 09:11:58.703111 18.80 mathematics -3 275 65622 4294967302 71.78 8.49 false wendy robinson 2013-03-01 09:11:58.703294 95.39 undecided -3 344 65733 4294967363 0.56 11.96 true rachel thompson 2013-03-01 09:11:58.703276 88.46 wind surfing -3 376 65548 4294967431 96.78 43.23 false fred ellison 2013-03-01 09:11:58.703233 75.39 education @@ -189,6 +185,10 @@ NULL 473 65720 4294967324 80.74 40.6 false holly falkner 2013-03-01 09:11:58.703 -2 461 65648 4294967425 58.52 24.85 false rachel thompson 2013-03-01 09:11:58.703318 85.62 zync studies -1 268 65778 4294967418 56.33 44.73 true calvin falkner 2013-03-01 09:11:58.70322 7.37 history -1 281 65643 4294967323 15.1 45.0 false irene nixon 2013-03-01 09:11:58.703223 80.96 undecided +-1 300 65663 4294967343 71.26 34.62 true calvin ovid 2013-03-01 09:11:58.703262 78.56 study skills +-1 348 65556 4294967413 35.17 9.51 false bob young 2013-03-01 09:11:58.70328 45.81 quiet hour +-1 372 65680 4294967490 15.45 18.09 false 
ethan laertes 2013-03-01 09:11:58.70311 65.88 opthamology +-1 417 65685 4294967492 28.89 5.19 true mike white 2013-03-01 09:11:58.703275 90.69 forestry PREHOOK: query: SELECT SUM(HASH(*)) FROM (SELECT t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY t, si, i) as q PREHOOK: type: QUERY @@ -284,10 +284,6 @@ POSTHOOK: query: SELECT t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_ POSTHOOK: type: QUERY POSTHOOK: Input: default@over1korc_n1 #### A masked pattern was here #### -NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL 374 65560 4294967516 65.43 22.48 true oscar quirinius 2013-03-01 09:11:58.703316 16.86 mathematics -NULL 409 65536 4294967490 46.97 25.92 false fred miller 2013-03-01 09:11:58.703116 33.45 history -NULL 473 65720 4294967324 80.74 40.6 false holly falkner 2013-03-01 09:11:58.703111 18.80 mathematics -3 275 65622 4294967302 71.78 8.49 false wendy robinson 2013-03-01 09:11:58.703294 95.39 undecided -3 344 65733 4294967363 0.56 11.96 true rachel thompson 2013-03-01 09:11:58.703276 88.46 wind surfing -3 376 65548 4294967431 96.78 43.23 false fred ellison 2013-03-01 09:11:58.703233 75.39 education @@ -304,6 +300,10 @@ NULL 473 65720 4294967324 80.74 40.6 false holly falkner 2013-03-01 09:11:58.703 -2 461 65648 4294967425 58.52 24.85 false rachel thompson 2013-03-01 09:11:58.703318 85.62 zync studies -1 268 65778 4294967418 56.33 44.73 true calvin falkner 2013-03-01 09:11:58.70322 7.37 history -1 281 65643 4294967323 15.1 45.0 false irene nixon 2013-03-01 09:11:58.703223 80.96 undecided +-1 300 65663 4294967343 71.26 34.62 true calvin ovid 2013-03-01 09:11:58.703262 78.56 study skills +-1 348 65556 4294967413 35.17 9.51 false bob young 2013-03-01 09:11:58.70328 45.81 quiet hour +-1 372 65680 4294967490 15.45 18.09 false ethan laertes 2013-03-01 09:11:58.70311 65.88 opthamology +-1 417 65685 4294967492 28.89 5.19 true mike white 2013-03-01 09:11:58.703275 90.69 forestry PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT SUM(HASH(*)) FROM (SELECT t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY t, si, i) as q diff --git ql/src/test/results/clientpositive/vector_date_1.q.out ql/src/test/results/clientpositive/vector_date_1.q.out index 6e997bc2b8..fd233d03e7 100644 --- ql/src/test/results/clientpositive/vector_date_1.q.out +++ ql/src/test/results/clientpositive/vector_date_1.q.out @@ -58,9 +58,9 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@vector_date_1 #### A masked pattern was here #### vector_date_1.dt1 vector_date_1.dt2 -NULL NULL 1999-12-31 2000-01-01 2001-01-01 2001-06-01 +NULL NULL PREHOOK: query: explain vectorization detail select dt1, dt2, @@ -197,9 +197,9 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@vector_date_1 #### A masked pattern was here #### dt1 dt2 _c2 _c3 _c4 _c5 _c6 _c7 _c8 _c9 -NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL 1999-12-31 2000-01-01 true true true true true true true true 2001-01-01 2001-06-01 true true true true true true true true +NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL PREHOOK: query: explain vectorization detail select dt1, dt2, @@ -336,9 +336,9 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@vector_date_1 #### A masked pattern was here #### dt1 dt2 _c2 _c3 _c4 _c5 _c6 _c7 _c8 _c9 -NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL 1999-12-31 2000-01-01 false false false false false false false false 2001-01-01 2001-06-01 false false false false false false false false +NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL PREHOOK: query: explain vectorization 
detail select dt1, @@ -475,9 +475,9 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@vector_date_1 #### A masked pattern was here #### dt1 _c1 _c2 _c3 _c4 _c5 _c6 _c7 _c8 -NULL NULL NULL NULL NULL NULL NULL NULL NULL 1999-12-31 true true true true true true true true 2001-01-01 true true true true true true true true +NULL NULL NULL NULL NULL NULL NULL NULL NULL PREHOOK: query: explain vectorization detail select dt1, @@ -614,9 +614,9 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@vector_date_1 #### A masked pattern was here #### dt1 _c1 _c2 _c3 _c4 _c5 _c6 _c7 _c8 -NULL NULL NULL NULL NULL NULL NULL NULL NULL 1999-12-31 false false false false false false false false 2001-01-01 false false false false false false false false +NULL NULL NULL NULL NULL NULL NULL NULL NULL PREHOOK: query: explain vectorization detail select dt1, dt2 diff --git ql/src/test/results/clientpositive/vector_decimal_1.q.out ql/src/test/results/clientpositive/vector_decimal_1.q.out index 253ccdd56d..b0ae75a689 100644 --- ql/src/test/results/clientpositive/vector_decimal_1.q.out +++ ql/src/test/results/clientpositive/vector_decimal_1.q.out @@ -132,8 +132,8 @@ POSTHOOK: query: select cast(t as boolean) from decimal_1 order by t POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### -NULL true +NULL PREHOOK: query: explain vectorization detail select cast(t as tinyint) from decimal_1 order by t PREHOOK: type: QUERY @@ -223,8 +223,8 @@ POSTHOOK: query: select cast(t as tinyint) from decimal_1 order by t POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### -NULL 17 +NULL PREHOOK: query: explain vectorization detail select cast(t as smallint) from decimal_1 order by t PREHOOK: type: QUERY @@ -314,8 +314,8 @@ POSTHOOK: query: select cast(t as smallint) from decimal_1 order by t POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### -NULL 17 +NULL PREHOOK: query: explain vectorization detail select cast(t as int) from decimal_1 order by t PREHOOK: type: QUERY @@ -405,8 +405,8 @@ POSTHOOK: query: select cast(t as int) from decimal_1 order by t POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### -NULL 17 +NULL PREHOOK: query: explain vectorization detail select cast(t as bigint) from decimal_1 order by t PREHOOK: type: QUERY @@ -496,8 +496,8 @@ POSTHOOK: query: select cast(t as bigint) from decimal_1 order by t POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### -NULL 17 +NULL PREHOOK: query: explain vectorization detail select cast(t as float) from decimal_1 order by t PREHOOK: type: QUERY @@ -587,8 +587,8 @@ POSTHOOK: query: select cast(t as float) from decimal_1 order by t POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### -NULL 17.29 +NULL PREHOOK: query: explain vectorization detail select cast(t as double) from decimal_1 order by t PREHOOK: type: QUERY @@ -678,8 +678,8 @@ POSTHOOK: query: select cast(t as double) from decimal_1 order by t POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### -NULL 17.29 +NULL PREHOOK: query: explain vectorization detail select cast(t as string) from decimal_1 order by t PREHOOK: type: QUERY @@ -769,8 +769,8 @@ POSTHOOK: query: select cast(t as string) from decimal_1 order by t POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### -NULL 17.29 +NULL PREHOOK: query: explain 
vectorization detail select cast(t as timestamp) from decimal_1 order by t PREHOOK: type: QUERY @@ -860,8 +860,8 @@ POSTHOOK: query: select cast(t as timestamp) from decimal_1 order by t POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_1 #### A masked pattern was here #### -NULL 1970-01-01 00:00:17.29 +NULL PREHOOK: query: drop table decimal_1 PREHOOK: type: DROPTABLE PREHOOK: Input: default@decimal_1 diff --git ql/src/test/results/clientpositive/vector_decimal_10_0.q.out ql/src/test/results/clientpositive/vector_decimal_10_0.q.out index 6526abe576..c53c0b19a5 100644 --- ql/src/test/results/clientpositive/vector_decimal_10_0.q.out +++ ql/src/test/results/clientpositive/vector_decimal_10_0.q.out @@ -121,8 +121,8 @@ POSTHOOK: query: SELECT `dec` FROM `DECIMAL` order by `dec` POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal #### A masked pattern was here #### -NULL 1000000000 +NULL PREHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT `dec` FROM `decimal_txt` order by `dec` PREHOOK: type: QUERY @@ -211,8 +211,8 @@ POSTHOOK: query: SELECT `dec` FROM `decimal_txt` order by `dec` POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_txt #### A masked pattern was here #### -NULL 1000000000 +NULL PREHOOK: query: DROP TABLE DECIMAL_txt PREHOOK: type: DROPTABLE PREHOOK: Input: default@decimal_txt diff --git ql/src/test/results/clientpositive/vector_decimal_3.q.out ql/src/test/results/clientpositive/vector_decimal_3.q.out index 3e9a1ee909..b292c9a01b 100644 --- ql/src/test/results/clientpositive/vector_decimal_3.q.out +++ ql/src/test/results/clientpositive/vector_decimal_3.q.out @@ -48,7 +48,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_3_n1 ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_3_n1 #### A masked pattern was here #### -NULL 0 -1234567890.123456789000000000 -1234567890 -4400.000000000000000000 4400 -1255.490000000000000000 -1255 @@ -86,6 +85,7 @@ NULL 0 125.200000000000000000 125 200.000000000000000000 200 1234567890.123456780000000000 1234567890 +NULL 0 PREHOOK: query: SELECT * FROM DECIMAL_3_n1 ORDER BY key DESC, value DESC PREHOOK: type: QUERY PREHOOK: Input: default@decimal_3_n1 @@ -140,7 +140,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_3_n1 ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_3_n1 #### A masked pattern was here #### -NULL 0 -1234567890.123456789000000000 -1234567890 -4400.000000000000000000 4400 -1255.490000000000000000 -1255 @@ -178,6 +177,7 @@ NULL 0 125.200000000000000000 125 200.000000000000000000 200 1234567890.123456780000000000 1234567890 +NULL 0 PREHOOK: query: SELECT DISTINCT key FROM DECIMAL_3_n1 ORDER BY key PREHOOK: type: QUERY PREHOOK: Input: default@decimal_3_n1 @@ -186,7 +186,6 @@ POSTHOOK: query: SELECT DISTINCT key FROM DECIMAL_3_n1 ORDER BY key POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_3_n1 #### A masked pattern was here #### -NULL -1234567890.123456789000000000 -4400.000000000000000000 -1255.490000000000000000 @@ -215,6 +214,7 @@ NULL 125.200000000000000000 200.000000000000000000 1234567890.123456780000000000 +NULL PREHOOK: query: SELECT key, sum(value) FROM DECIMAL_3_n1 GROUP BY key ORDER BY key PREHOOK: type: QUERY PREHOOK: Input: default@decimal_3_n1 @@ -223,7 +223,6 @@ POSTHOOK: query: SELECT key, sum(value) FROM DECIMAL_3_n1 GROUP BY key ORDER BY POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_3_n1 #### A masked pattern was here #### -NULL 0 -1234567890.123456789000000000 -1234567890 -4400.000000000000000000 4400 -1255.490000000000000000 -1255 @@ -252,6 +251,7 @@ NULL 
0 125.200000000000000000 125 200.000000000000000000 200 1234567890.123456780000000000 1234567890 +NULL 0 PREHOOK: query: SELECT value, sum(key) FROM DECIMAL_3_n1 GROUP BY value ORDER BY value PREHOOK: type: QUERY PREHOOK: Input: default@decimal_3_n1 diff --git ql/src/test/results/clientpositive/vector_decimal_4.q.out ql/src/test/results/clientpositive/vector_decimal_4.q.out index d365fb99ad..fc18645663 100644 --- ql/src/test/results/clientpositive/vector_decimal_4.q.out +++ ql/src/test/results/clientpositive/vector_decimal_4.q.out @@ -56,7 +56,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_4_1_n0 ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_4_1_n0 #### A masked pattern was here #### -NULL 0 -1234567890.1234567890000000000000000 -1234567890 -4400.0000000000000000000000000 4400 -1255.4900000000000000000000000 -1255 @@ -94,6 +93,7 @@ NULL 0 125.2000000000000000000000000 125 200.0000000000000000000000000 200 1234567890.1234567800000000000000000 1234567890 +NULL 0 PREHOOK: query: SELECT * FROM DECIMAL_4_2_n0 ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@decimal_4_2_n0 @@ -102,7 +102,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_4_2_n0 ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_4_2_n0 #### A masked pattern was here #### -NULL NULL -1234567890.1234567890000000000000000 -3703703670.3703703670000000000000000 -4400.0000000000000000000000000 -13200.0000000000000000000000000 -1255.4900000000000000000000000 -3766.4700000000000000000000000 @@ -140,6 +139,7 @@ NULL NULL 125.2000000000000000000000000 375.6000000000000000000000000 200.0000000000000000000000000 600.0000000000000000000000000 1234567890.1234567800000000000000000 3703703670.3703703400000000000000000 +NULL NULL PREHOOK: query: SELECT * FROM DECIMAL_4_2_n0 ORDER BY key PREHOOK: type: QUERY PREHOOK: Input: default@decimal_4_2_n0 @@ -148,7 +148,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_4_2_n0 ORDER BY key POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_4_2_n0 #### A masked pattern was here #### -NULL NULL -1234567890.1234567890000000000000000 -3703703670.3703703670000000000000000 -4400.0000000000000000000000000 -13200.0000000000000000000000000 -1255.4900000000000000000000000 -3766.4700000000000000000000000 @@ -186,6 +185,7 @@ NULL NULL 125.2000000000000000000000000 375.6000000000000000000000000 200.0000000000000000000000000 600.0000000000000000000000000 1234567890.1234567800000000000000000 3703703670.3703703400000000000000000 +NULL NULL PREHOOK: query: SELECT * FROM DECIMAL_4_2_n0 ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@decimal_4_2_n0 @@ -194,7 +194,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_4_2_n0 ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_4_2_n0 #### A masked pattern was here #### -NULL NULL -1234567890.1234567890000000000000000 -3703703670.3703703670000000000000000 -4400.0000000000000000000000000 -13200.0000000000000000000000000 -1255.4900000000000000000000000 -3766.4700000000000000000000000 @@ -232,6 +231,7 @@ NULL NULL 125.2000000000000000000000000 375.6000000000000000000000000 200.0000000000000000000000000 600.0000000000000000000000000 1234567890.1234567800000000000000000 3703703670.3703703400000000000000000 +NULL NULL PREHOOK: query: DROP TABLE DECIMAL_4_1_n0 PREHOOK: type: DROPTABLE PREHOOK: Input: default@decimal_4_1_n0 diff --git ql/src/test/results/clientpositive/vector_decimal_5.q.out ql/src/test/results/clientpositive/vector_decimal_5.q.out index a2e114b410..25a16cf74c 100644 --- 
ql/src/test/results/clientpositive/vector_decimal_5.q.out +++ ql/src/test/results/clientpositive/vector_decimal_5.q.out @@ -56,9 +56,6 @@ POSTHOOK: query: SELECT key FROM DECIMAL_5 ORDER BY key POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_5 #### A masked pattern was here #### -NULL -NULL -NULL -4400.00000 -1255.49000 -1.12200 @@ -94,6 +91,9 @@ NULL 124.00000 125.20000 200.00000 +NULL +NULL +NULL PREHOOK: query: SELECT DISTINCT key FROM DECIMAL_5 ORDER BY key PREHOOK: type: QUERY PREHOOK: Input: default@decimal_5 @@ -102,7 +102,6 @@ POSTHOOK: query: SELECT DISTINCT key FROM DECIMAL_5 ORDER BY key POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_5 #### A masked pattern was here #### -NULL -4400.00000 -1255.49000 -1.12200 @@ -129,6 +128,7 @@ NULL 124.00000 125.20000 200.00000 +NULL PREHOOK: query: explain SELECT cast(key as decimal) FROM DECIMAL_5 PREHOOK: type: QUERY POSTHOOK: query: explain SELECT cast(key as decimal) FROM DECIMAL_5 diff --git ql/src/test/results/clientpositive/vector_decimal_6.q.out ql/src/test/results/clientpositive/vector_decimal_6.q.out index 445896b145..ab92ba1e66 100644 --- ql/src/test/results/clientpositive/vector_decimal_6.q.out +++ ql/src/test/results/clientpositive/vector_decimal_6.q.out @@ -194,11 +194,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_6_1 ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_6_1 #### A masked pattern was here #### -NULL -1234567890 -NULL 0 -NULL 3 -NULL 4 -NULL 1234567890 -4400.00000 4400 -1255.49000 -1255 -1.12200 -11 @@ -221,6 +216,11 @@ NULL 1234567890 124.00000 124 125.20000 125 23232.23435 2 +NULL -1234567890 +NULL 0 +NULL 3 +NULL 4 +NULL 1234567890 PREHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT * FROM DECIMAL_6_2 ORDER BY key, value PREHOOK: type: QUERY @@ -309,7 +309,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_6_2 ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_6_2 #### A masked pattern was here #### -NULL 0 -1234567890.1235 -1234567890 -4400.0000 4400 -1255.4900 -1255 @@ -336,6 +335,7 @@ NULL 0 2389432.2375 3 2389432.2375 4 1234567890.1235 1234567890 +NULL 0 PREHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT T.key from ( SELECT key, value from DECIMAL_6_1 @@ -432,12 +432,6 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_6_1 POSTHOOK: Input: default@decimal_6_2 #### A masked pattern was here #### -NULL -NULL -NULL -NULL -NULL -NULL -1234567890.12350 -4400.00000 -4400.00000 @@ -486,6 +480,12 @@ NULL 2389432.23750 2389432.23750 1234567890.12350 +NULL +NULL +NULL +NULL +NULL +NULL PREHOOK: query: EXPLAIN VECTORIZATION DETAIL CREATE TABLE DECIMAL_6_3 STORED AS ORC AS SELECT key + 5.5 AS k, value * 11 AS v from DECIMAL_6_1 ORDER BY v PREHOOK: type: CREATETABLE_AS_SELECT @@ -612,11 +612,6 @@ POSTHOOK: query: SELECT * FROM DECIMAL_6_3 ORDER BY k, v POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_6_3 #### A masked pattern was here #### -NULL -695344902 -NULL 0 -NULL 33 -NULL 44 -NULL 695344902 -4394.50000 48400 -1249.99000 -13805 4.37800 -121 @@ -639,3 +634,8 @@ NULL 695344902 129.50000 1364 130.70000 1375 23237.73435 22 +NULL -695344902 +NULL 0 +NULL 33 +NULL 44 +NULL 695344902 diff --git ql/src/test/results/clientpositive/vector_decimal_precision.q.out ql/src/test/results/clientpositive/vector_decimal_precision.q.out index e3e354f925..a530b3b2ba 100644 --- ql/src/test/results/clientpositive/vector_decimal_precision.q.out +++ ql/src/test/results/clientpositive/vector_decimal_precision.q.out @@ -59,6 +59,37 @@ POSTHOOK: query: SELECT * FROM 
DECIMAL_PRECISION ORDER BY `dec` POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_precision #### A masked pattern was here #### +0.0000000000 +0.0000000000 +0.0000000000 +0.0000000000 +0.0000000000 +0.1234567890 +0.1234567890 +1.2345678901 +1.2345678901 +1.2345678901 +12.3456789012 +12.3456789012 +12.3456789012 +123.4567890123 +123.4567890123 +123.4567890123 +1234.5678901235 +1234.5678901235 +1234.5678901235 +12345.6789012346 +12345.6789012346 +123456.7890123456 +123456.7890123457 +1234567.8901234560 +1234567.8901234568 +12345678.9012345600 +12345678.9012345679 +123456789.0123456000 +123456789.0123456789 +1234567890.1234560000 +1234567890.1234567890 NULL NULL NULL @@ -103,37 +134,6 @@ NULL NULL NULL NULL -0.0000000000 -0.0000000000 -0.0000000000 -0.0000000000 -0.0000000000 -0.1234567890 -0.1234567890 -1.2345678901 -1.2345678901 -1.2345678901 -12.3456789012 -12.3456789012 -12.3456789012 -123.4567890123 -123.4567890123 -123.4567890123 -1234.5678901235 -1234.5678901235 -1234.5678901235 -12345.6789012346 -12345.6789012346 -123456.7890123456 -123456.7890123457 -1234567.8901234560 -1234567.8901234568 -12345678.9012345600 -12345678.9012345679 -123456789.0123456000 -123456789.0123456789 -1234567890.1234560000 -1234567890.1234567890 PREHOOK: query: SELECT `dec`, `dec` + 1, `dec` - 1 FROM DECIMAL_PRECISION ORDER BY `dec` PREHOOK: type: QUERY PREHOOK: Input: default@decimal_precision @@ -142,50 +142,6 @@ POSTHOOK: query: SELECT `dec`, `dec` + 1, `dec` - 1 FROM DECIMAL_PRECISION ORDER POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_precision #### A masked pattern was here #### -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL 0.0000000000 1.0000000000 -1.0000000000 0.0000000000 1.0000000000 -1.0000000000 0.0000000000 1.0000000000 -1.0000000000 @@ -217,14 +173,6 @@ NULL NULL NULL 123456789.0123456789 123456790.0123456789 123456788.0123456789 1234567890.1234560000 1234567891.1234560000 1234567889.1234560000 1234567890.1234567890 1234567891.1234567890 1234567889.1234567890 -PREHOOK: query: SELECT `dec`, `dec` * 2, `dec` / 3 FROM DECIMAL_PRECISION ORDER BY `dec` -PREHOOK: type: QUERY -PREHOOK: Input: default@decimal_precision -#### A masked pattern was here #### -POSTHOOK: query: SELECT `dec`, `dec` * 2, `dec` / 3 FROM DECIMAL_PRECISION ORDER BY `dec` -POSTHOOK: type: QUERY -POSTHOOK: Input: default@decimal_precision -#### A masked pattern was here #### NULL NULL NULL NULL NULL NULL NULL NULL NULL @@ -269,6 +217,14 @@ NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: SELECT `dec`, `dec` * 2, `dec` / 3 FROM DECIMAL_PRECISION ORDER BY `dec` +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT `dec`, `dec` * 2, `dec` / 3 FROM DECIMAL_PRECISION ORDER BY `dec` +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here 
#### 0.0000000000 0.0000000000 0.000000000000 0.0000000000 0.0000000000 0.000000000000 0.0000000000 0.0000000000 0.000000000000 @@ -300,6 +256,50 @@ NULL NULL NULL 123456789.0123456789 246913578.0246913578 41152263.004115226300 1234567890.1234560000 2469135780.2469120000 411522630.041152000000 1234567890.1234567890 2469135780.2469135780 411522630.041152263000 +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL PREHOOK: query: SELECT `dec`, `dec` / 9 FROM DECIMAL_PRECISION ORDER BY `dec` PREHOOK: type: QUERY PREHOOK: Input: default@decimal_precision @@ -308,50 +308,6 @@ POSTHOOK: query: SELECT `dec`, `dec` / 9 FROM DECIMAL_PRECISION ORDER BY `dec` POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_precision #### A masked pattern was here #### -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL 0.0000000000 0.000000000000 0.0000000000 0.000000000000 0.0000000000 0.000000000000 @@ -383,14 +339,6 @@ NULL NULL 123456789.0123456789 13717421.001371742100 1234567890.1234560000 137174210.013717333333 1234567890.1234567890 137174210.013717421000 -PREHOOK: query: SELECT `dec`, `dec` / 27 FROM DECIMAL_PRECISION ORDER BY `dec` -PREHOOK: type: QUERY -PREHOOK: Input: default@decimal_precision -#### A masked pattern was here #### -POSTHOOK: query: SELECT `dec`, `dec` / 27 FROM DECIMAL_PRECISION ORDER BY `dec` -POSTHOOK: type: QUERY -POSTHOOK: Input: default@decimal_precision -#### A masked pattern was here #### NULL NULL NULL NULL NULL NULL @@ -435,6 +383,14 @@ NULL NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: SELECT `dec`, `dec` / 27 FROM DECIMAL_PRECISION ORDER BY `dec` +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT `dec`, `dec` / 27 FROM DECIMAL_PRECISION ORDER BY `dec` +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### 0.0000000000 0.0000000000000 0.0000000000 0.0000000000000 0.0000000000 0.0000000000000 @@ -466,14 +422,6 @@ NULL NULL 123456789.0123456789 4572473.6671239140333 1234567890.1234560000 45724736.6712391111111 1234567890.1234567890 45724736.6712391403333 -PREHOOK: query: SELECT `dec`, `dec` * `dec` FROM DECIMAL_PRECISION ORDER BY `dec` -PREHOOK: type: QUERY -PREHOOK: Input: default@decimal_precision -#### A masked pattern was here #### -POSTHOOK: query: SELECT `dec`, `dec` * `dec` FROM DECIMAL_PRECISION ORDER BY `dec` -POSTHOOK: type: QUERY -POSTHOOK: Input: default@decimal_precision 
-#### A masked pattern was here #### NULL NULL NULL NULL NULL NULL @@ -518,6 +466,14 @@ NULL NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: SELECT `dec`, `dec` * `dec` FROM DECIMAL_PRECISION ORDER BY `dec` +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision +#### A masked pattern was here #### +POSTHOOK: query: SELECT `dec`, `dec` * `dec` FROM DECIMAL_PRECISION ORDER BY `dec` +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision +#### A masked pattern was here #### 0.0000000000 0.00000000000000000 0.0000000000 0.00000000000000000 0.0000000000 0.00000000000000000 @@ -549,6 +505,50 @@ NULL NULL 123456789.0123456789 15241578753238836.75019051998750191 1234567890.1234560000 1524157875323881726.87092138393600000 1234567890.1234567890 1524157875323883675.01905199875019052 +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL PREHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT avg(`dec`), sum(`dec`) FROM DECIMAL_PRECISION PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT avg(`dec`), sum(`dec`) FROM DECIMAL_PRECISION @@ -727,50 +727,6 @@ POSTHOOK: query: SELECT `dec`, `dec` + 1, `dec` - 1 FROM DECIMAL_PRECISION_txt_s POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_precision_txt_small #### A masked pattern was here #### -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL -NULL NULL NULL 0.0000000000 1.0000000000 -1.0000000000 0.0000000000 1.0000000000 -1.0000000000 0.0000000000 1.0000000000 -1.0000000000 @@ -802,14 +758,6 @@ NULL NULL NULL 123456789.0123456789 123456790.0123456789 123456788.0123456789 1234567890.1234560000 1234567891.1234560000 1234567889.1234560000 1234567890.1234567890 1234567891.1234567890 1234567889.1234567890 -PREHOOK: query: SELECT `dec`, `dec` * 2, `dec` / 3 FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` -PREHOOK: type: QUERY -PREHOOK: Input: default@decimal_precision_txt_small -#### A masked pattern was here #### -POSTHOOK: query: SELECT `dec`, `dec` * 2, `dec` / 3 FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` -POSTHOOK: type: QUERY -POSTHOOK: Input: default@decimal_precision_txt_small -#### A masked pattern was here #### NULL NULL NULL NULL NULL NULL NULL NULL NULL @@ -854,6 +802,14 @@ NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: SELECT `dec`, `dec` * 2, `dec` / 3 FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision_txt_small +#### A masked pattern was here #### +POSTHOOK: 
query: SELECT `dec`, `dec` * 2, `dec` / 3 FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision_txt_small +#### A masked pattern was here #### 0.0000000000 0.0000000000 0.000000000000 0.0000000000 0.0000000000 0.000000000000 0.0000000000 0.0000000000 0.000000000000 @@ -885,6 +841,50 @@ NULL NULL NULL 123456789.0123456789 246913578.0246913578 41152263.004115226300 1234567890.1234560000 2469135780.2469120000 411522630.041152000000 1234567890.1234567890 2469135780.2469135780 411522630.041152263000 +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL PREHOOK: query: SELECT `dec`, `dec` / 9 FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` PREHOOK: type: QUERY PREHOOK: Input: default@decimal_precision_txt_small @@ -893,50 +893,6 @@ POSTHOOK: query: SELECT `dec`, `dec` / 9 FROM DECIMAL_PRECISION_txt_small ORDER POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_precision_txt_small #### A masked pattern was here #### -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL -NULL NULL 0.0000000000 0.000000000000 0.0000000000 0.000000000000 0.0000000000 0.000000000000 @@ -968,14 +924,6 @@ NULL NULL 123456789.0123456789 13717421.001371742100 1234567890.1234560000 137174210.013717333333 1234567890.1234567890 137174210.013717421000 -PREHOOK: query: SELECT `dec`, `dec` / 27 FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` -PREHOOK: type: QUERY -PREHOOK: Input: default@decimal_precision_txt_small -#### A masked pattern was here #### -POSTHOOK: query: SELECT `dec`, `dec` / 27 FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` -POSTHOOK: type: QUERY -POSTHOOK: Input: default@decimal_precision_txt_small -#### A masked pattern was here #### NULL NULL NULL NULL NULL NULL @@ -1020,6 +968,14 @@ NULL NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: SELECT `dec`, `dec` / 27 FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision_txt_small +#### A masked pattern was here #### +POSTHOOK: query: SELECT `dec`, `dec` / 27 FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision_txt_small +#### A masked pattern was here #### 0.0000000000 0.0000000000000 0.0000000000 0.0000000000000 0.0000000000 0.0000000000000 @@ -1051,14 +1007,6 @@ NULL NULL 123456789.0123456789 4572473.6671239140333 1234567890.1234560000 45724736.6712391111111 1234567890.1234567890 45724736.6712391403333 -PREHOOK: query: SELECT 
`dec`, `dec` * `dec` FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` -PREHOOK: type: QUERY -PREHOOK: Input: default@decimal_precision_txt_small -#### A masked pattern was here #### -POSTHOOK: query: SELECT `dec`, `dec` * `dec` FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` -POSTHOOK: type: QUERY -POSTHOOK: Input: default@decimal_precision_txt_small -#### A masked pattern was here #### NULL NULL NULL NULL NULL NULL @@ -1103,6 +1051,14 @@ NULL NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: SELECT `dec`, `dec` * `dec` FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` +PREHOOK: type: QUERY +PREHOOK: Input: default@decimal_precision_txt_small +#### A masked pattern was here #### +POSTHOOK: query: SELECT `dec`, `dec` * `dec` FROM DECIMAL_PRECISION_txt_small ORDER BY `dec` +POSTHOOK: type: QUERY +POSTHOOK: Input: default@decimal_precision_txt_small +#### A masked pattern was here #### 0.0000000000 0.00000000000000000 0.0000000000 0.00000000000000000 0.0000000000 0.00000000000000000 @@ -1134,6 +1090,50 @@ NULL NULL 123456789.0123456789 15241578753238836.75019051998750191 1234567890.1234560000 1524157875323881726.87092138393600000 1234567890.1234567890 1524157875323883675.01905199875019052 +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL +NULL NULL PREHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT avg(`dec`), sum(`dec`) FROM DECIMAL_PRECISION_txt_small PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN VECTORIZATION DETAIL SELECT avg(`dec`), sum(`dec`) FROM DECIMAL_PRECISION_txt_small diff --git ql/src/test/results/clientpositive/vector_left_outer_join2.q.out ql/src/test/results/clientpositive/vector_left_outer_join2.q.out index 7f2f17fe37..8192435ead 100644 --- ql/src/test/results/clientpositive/vector_left_outer_join2.q.out +++ ql/src/test/results/clientpositive/vector_left_outer_join2.q.out @@ -65,6 +65,7 @@ POSTHOOK: Output: default@tjoin1 POSTHOOK: Lineage: tjoin1.c1 SIMPLE [(tjoin1stage)tjoin1stage.FieldSchema(name:c1, type:int, comment:null), ] POSTHOOK: Lineage: tjoin1.c2 EXPRESSION [(tjoin1stage)tjoin1stage.FieldSchema(name:c2, type:char(2), comment:null), ] POSTHOOK: Lineage: tjoin1.rnum SIMPLE [(tjoin1stage)tjoin1stage.FieldSchema(name:rnum, type:int, comment:null), ] +_col0 _col1 _col2 PREHOOK: query: INSERT INTO TABLE TJOIN2 SELECT * from TJOIN2STAGE PREHOOK: type: QUERY PREHOOK: Input: default@tjoin2stage @@ -76,12 +77,14 @@ POSTHOOK: Output: default@tjoin2 POSTHOOK: Lineage: tjoin2.c1 SIMPLE [(tjoin2stage)tjoin2stage.FieldSchema(name:c1, type:int, comment:null), ] POSTHOOK: Lineage: tjoin2.c2 SIMPLE [(tjoin2stage)tjoin2stage.FieldSchema(name:c2, type:char(2), comment:null), ] POSTHOOK: Lineage: tjoin2.rnum SIMPLE [(tjoin2stage)tjoin2stage.FieldSchema(name:rnum, type:int, comment:null), ] -PREHOOK: query: explain vectorization expression +tjoin2stage.rnum tjoin2stage.c1 tjoin2stage.c2 +PREHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization expression +POSTHOOK: query: explain 
vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) POSTHOOK: type: QUERY +Explain PLAN VECTORIZATION: enabled: false enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] @@ -166,15 +169,21 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@tjoin1 POSTHOOK: Input: default@tjoin2 #### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c2j2 0 10 15 NULL 1 20 25 NULL 2 NULL 50 NULL -PREHOOK: query: explain +PREHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) PREHOOK: type: QUERY -POSTHOOK: query: explain +POSTHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: false + enabledConditionsNotMet: [hive.vectorized.execution.enabled IS false] + STAGE DEPENDENCIES: Stage-4 is a root stage Stage-3 depends on stages: Stage-4 @@ -255,15 +264,17 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@tjoin1 POSTHOOK: Input: default@tjoin2 #### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c2j2 0 10 15 NULL 1 20 25 NULL 2 NULL 50 NULL -PREHOOK: query: explain vectorization expression +PREHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization expression +POSTHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) POSTHOOK: type: QUERY +Explain PLAN VECTORIZATION: enabled: true enabledConditionsMet: [hive.vectorized.execution.enabled IS true] @@ -305,6 +316,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct] Select Operator expressions: rnum (type: int), c1 (type: int), c2 (type: int) outputColumnNames: _col0, _col1, _col2 @@ -359,6 +371,12 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: rnum:int, c1:int, c2:int + partitionColumnCount: 0 + scratchColumnTypeNames: [string] Local Work: Map Reduce Local Work @@ -378,15 +396,17 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@tjoin1 POSTHOOK: Input: default@tjoin2 #### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c2j2 0 10 15 NULL 1 20 25 NULL 2 NULL 50 NULL -PREHOOK: query: explain vectorization expression +PREHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization expression +POSTHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) POSTHOOK: type: QUERY +Explain PLAN VECTORIZATION: enabled: true enabledConditionsMet: [hive.vectorized.execution.enabled IS true] 
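Editor's note: the churn in this q.out, above and below, is mechanical rather than behavioral — the test was switched from `explain vectorization expression` to `explain vectorization detail`, which additionally prints an `Explain` column header, the TableScan's `vectorizationSchemaColumns`, and a per-map `rowBatchContext`, and the result sections gained column-header lines (plausibly from enabling header printing, e.g. hive.cli.print.header=true, in the test setup — an assumption, not shown in this patch). A minimal sketch of a query that regenerates this richer output against the tables used here; the set statements are assumptions for reproducing it interactively:

set hive.vectorized.execution.enabled=true;
explain vectorization detail
select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2
from tjoin1 left outer join tjoin2
  on (tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15);

With vectorization disabled the plan header collapses to the `enabledConditionsNotMet` line seen in the first two plans above; with it enabled, the DETAIL level adds the schema-column and row-batch sections that account for most of the added `+` lines in this file.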
@@ -428,6 +448,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct] Select Operator expressions: rnum (type: int), c1 (type: int), c2 (type: int) outputColumnNames: _col0, _col1, _col2 @@ -482,6 +503,12 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: rnum:int, c1:int, c2:int + partitionColumnCount: 0 + scratchColumnTypeNames: [string] Local Work: Map Reduce Local Work @@ -501,15 +528,17 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@tjoin1 POSTHOOK: Input: default@tjoin2 #### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c2j2 0 10 15 NULL 1 20 25 NULL 2 NULL 50 NULL -PREHOOK: query: explain vectorization expression +PREHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization expression +POSTHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) POSTHOOK: type: QUERY +Explain PLAN VECTORIZATION: enabled: true enabledConditionsMet: [hive.vectorized.execution.enabled IS true] @@ -551,6 +580,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct] Select Operator expressions: rnum (type: int), c1 (type: int), c2 (type: int) outputColumnNames: _col0, _col1, _col2 @@ -605,6 +635,12 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: rnum:int, c1:int, c2:int + partitionColumnCount: 0 + scratchColumnTypeNames: [string] Local Work: Map Reduce Local Work @@ -624,15 +660,17 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@tjoin1 POSTHOOK: Input: default@tjoin2 #### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c2j2 0 10 15 NULL 1 20 25 NULL 2 NULL 50 NULL -PREHOOK: query: explain vectorization expression +PREHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) PREHOOK: type: QUERY -POSTHOOK: query: explain vectorization expression +POSTHOOK: query: explain vectorization detail select tjoin1.rnum, tjoin1.c1, tjoin1.c2, tjoin2.c2 as c2j2 from tjoin1 left outer join tjoin2 on ( tjoin1.c1 = tjoin2.c1 and tjoin1.c2 > 15 ) POSTHOOK: type: QUERY +Explain PLAN VECTORIZATION: enabled: true enabledConditionsMet: [hive.vectorized.execution.enabled IS true] @@ -674,6 +712,7 @@ STAGE PLANS: Statistics: Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true + vectorizationSchemaColumns: [0:rnum:int, 1:c1:int, 2:c2:int, 3:ROW__ID:struct] Select Operator expressions: rnum (type: int), c1 (type: int), c2 (type: int) outputColumnNames: _col0, _col1, _col2 @@ -728,6 +767,12 @@ STAGE PLANS: allNative: false usesVectorUDFAdaptor: false vectorized: true + rowBatchContext: + dataColumnCount: 3 + includeColumns: [0, 1, 2] + dataColumns: rnum:int, c1:int, 
c2:int + partitionColumnCount: 0 + scratchColumnTypeNames: [string] Local Work: Map Reduce Local Work @@ -747,6 +792,7 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@tjoin1 POSTHOOK: Input: default@tjoin2 #### A masked pattern was here #### +tjoin1.rnum tjoin1.c1 tjoin1.c2 c2j2 0 10 15 NULL 1 20 25 NULL 2 NULL 50 NULL diff --git ql/src/test/results/clientpositive/vector_outer_join1.q.out ql/src/test/results/clientpositive/vector_outer_join1.q.out index ece32f6aa6..2026d59e7a 100644 --- ql/src/test/results/clientpositive/vector_outer_join1.q.out +++ ql/src/test/results/clientpositive/vector_outer_join1.q.out @@ -120,11 +120,11 @@ POSTHOOK: query: select * from small_alltypesorc3a POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc3a #### A masked pattern was here #### -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +NULL -16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false +NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false +NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false +NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true +NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true PREHOOK: query: select * from small_alltypesorc4a PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc4a @@ -211,11 +211,11 @@ POSTHOOK: Input: default@small_alltypesorc_a -64 -7196 NULL 658026952 -64.0 -7196.0 NULL 4tAur 1969-12-31 15:59:53.866 1969-12-31 15:59:58.174 NULL true -64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +NULL -16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false +NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false +NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false +NULL -16310 206154150 1864027286 NULL -16310.0 
5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true +NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true PREHOOK: query: explain vectorization detail select * from small_alltypesorc_a c @@ -248,11 +248,11 @@ STAGE PLANS: $hdt$_1:cd TableScan alias: cd - Statistics: Num rows: 15 Data size: 3320 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3380 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11 - Statistics: Num rows: 15 Data size: 3320 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3380 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col2 (type: int) @@ -263,7 +263,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: c - Statistics: Num rows: 15 Data size: 3320 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3380 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] @@ -274,7 +274,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] - Statistics: Num rows: 15 Data size: 3320 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3380 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join 0 to 1 @@ -289,13 +289,13 @@ STAGE PLANS: nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23 - Statistics: Num rows: 16 Data size: 3652 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 3718 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 16 Data size: 3652 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 3718 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -353,11 +353,11 @@ POSTHOOK: Input: default@small_alltypesorc_a -64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL -64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 
1969-12-31 15:59:48.655 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -64 -6907 253665376 NULL -64.0 -6907.0 1cGVWH7n1QU NULL NULL 1969-12-31 15:59:53.66 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +NULL -16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false NULL -16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false +NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false +NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false +NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true +NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true PREHOOK: query: explain vectorization detail select c.ctinyint from small_alltypesorc_a c @@ -390,11 +390,11 @@ STAGE PLANS: $hdt$_1:hd TableScan alias: hd - Statistics: Num rows: 15 Data size: 3320 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3380 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ctinyint (type: tinyint) outputColumnNames: _col0 - Statistics: Num rows: 15 Data size: 3320 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3380 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col0 (type: tinyint) @@ -405,7 +405,7 @@ STAGE PLANS: Map 
Operator Tree: TableScan alias: c - Statistics: Num rows: 15 Data size: 3320 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3380 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] @@ -416,7 +416,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [0] - Statistics: Num rows: 15 Data size: 3320 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3380 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join 0 to 1 @@ -431,13 +431,13 @@ STAGE PLANS: nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false outputColumnNames: _col0 - Statistics: Num rows: 16 Data size: 3652 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 3718 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false File Sink Vectorization: className: VectorFileSinkOperator native: false - Statistics: Num rows: 16 Data size: 3652 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 3718 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -627,11 +627,11 @@ STAGE PLANS: $hdt$_1:cd TableScan alias: cd - Statistics: Num rows: 15 Data size: 3320 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3380 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: cint (type: int) outputColumnNames: _col0 - Statistics: Num rows: 15 Data size: 3320 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3380 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col1 (type: int) @@ -639,11 +639,11 @@ STAGE PLANS: $hdt$_2:hd TableScan alias: hd - Statistics: Num rows: 15 Data size: 3320 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3380 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ctinyint (type: tinyint) outputColumnNames: _col0 - Statistics: Num rows: 15 Data size: 3320 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3380 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col0 (type: tinyint) @@ -654,7 +654,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: c - Statistics: Num rows: 15 Data size: 3320 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3380 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] @@ -665,7 +665,7 @@ STAGE PLANS: className: VectorSelectOperator 
native: true projectedOutputColumnNums: [0, 2] - Statistics: Num rows: 15 Data size: 3320 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3380 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join 0 to 1 @@ -680,7 +680,7 @@ STAGE PLANS: nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false outputColumnNames: _col0 - Statistics: Num rows: 16 Data size: 3652 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 3718 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join 0 to 1 @@ -695,7 +695,7 @@ STAGE PLANS: nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false outputColumnNames: _col0 - Statistics: Num rows: 17 Data size: 4017 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 4089 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(), sum(_col0) Group By Vectorization: diff --git ql/src/test/results/clientpositive/vector_outer_join2.q.out ql/src/test/results/clientpositive/vector_outer_join2.q.out index 455d09e299..caf51a1673 100644 --- ql/src/test/results/clientpositive/vector_outer_join2.q.out +++ ql/src/test/results/clientpositive/vector_outer_join2.q.out @@ -94,11 +94,11 @@ POSTHOOK: query: select * from small_alltypesorc1a_n0 POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc1a_n0 #### A masked pattern was here #### -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +-51 NULL -1064981602 -1444011153 -51.0 NULL aY3tpnr6wfvmWMG0U881 2Ol4N3Ha0815Ej54lA2N 1969-12-31 16:00:08.451 NULL false false +-51 NULL -1065775394 -1331703092 -51.0 NULL aD88uS2N8DmqPlvjOa7F46i7 Ut8ka2o8iokF504065PYS 1969-12-31 16:00:08.451 NULL false true +-51 NULL -1066684273 2034191923 -51.0 NULL 2W4Kg220OcCy065HG60k6e D7GOQhc3qbAR6 1969-12-31 16:00:08.451 NULL false false +-51 NULL -1067683781 1750003656 -51.0 NULL IbgbUvP5 47x2I874 1969-12-31 16:00:08.451 NULL false true +-51 NULL -1071480828 -1401575336 -51.0 NULL aw724t8c5558x2xneC624 4uE7l74tESBiKfu7c8wM7GA 1969-12-31 16:00:08.451 NULL true true PREHOOK: query: select * from small_alltypesorc2a_n0 PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc2a_n0 @@ -120,11 +120,11 @@ POSTHOOK: query: select * from small_alltypesorc3a_n0 POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc3a_n0 #### A masked pattern was here #### 
-NULL -13166 626923679 NULL NULL -13166.0 821UdmGbkEf4j NULL 1969-12-31 15:59:55.089 1969-12-31 16:00:15.69 true NULL -NULL -14426 626923679 NULL NULL -14426.0 821UdmGbkEf4j NULL 1969-12-31 16:00:11.505 1969-12-31 16:00:13.309 true NULL -NULL -14847 626923679 NULL NULL -14847.0 821UdmGbkEf4j NULL 1969-12-31 16:00:00.612 1969-12-31 15:59:43.704 true NULL -NULL -15632 528534767 NULL NULL -15632.0 cvLH6Eat2yFsyy7p NULL NULL 1969-12-31 15:59:53.593 true NULL -NULL -15830 253665376 NULL NULL -15830.0 1cGVWH7n1QU NULL 1969-12-31 16:00:02.582 1969-12-31 16:00:00.518 true NULL +-64 -10462 626923679 NULL -64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL +-64 -15920 528534767 NULL -64.0 -15920.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:51.859 1969-12-31 16:00:14.468 true NULL +-64 -6907 253665376 NULL -64.0 -6907.0 1cGVWH7n1QU NULL NULL 1969-12-31 15:59:53.66 true NULL +-64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL +-64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL PREHOOK: query: select * from small_alltypesorc4a_n0 PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc4a_n0 @@ -206,26 +206,26 @@ POSTHOOK: query: select * from small_alltypesorc_a_n0 POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc_a_n0 #### A masked pattern was here #### +-51 NULL -1064981602 -1444011153 -51.0 NULL aY3tpnr6wfvmWMG0U881 2Ol4N3Ha0815Ej54lA2N 1969-12-31 16:00:08.451 NULL false false +-51 NULL -1065775394 -1331703092 -51.0 NULL aD88uS2N8DmqPlvjOa7F46i7 Ut8ka2o8iokF504065PYS 1969-12-31 16:00:08.451 NULL false true +-51 NULL -1066684273 2034191923 -51.0 NULL 2W4Kg220OcCy065HG60k6e D7GOQhc3qbAR6 1969-12-31 16:00:08.451 NULL false false +-51 NULL -1067683781 1750003656 -51.0 NULL IbgbUvP5 47x2I874 1969-12-31 16:00:08.451 NULL false true +-51 NULL -1071480828 -1401575336 -51.0 NULL aw724t8c5558x2xneC624 4uE7l74tESBiKfu7c8wM7GA 1969-12-31 16:00:08.451 NULL true true -60 -200 NULL NULL -60.0 -200.0 NULL NULL 1969-12-31 16:00:11.996 1969-12-31 15:59:55.451 NULL NULL -61 -7196 NULL NULL -61.0 -7196.0 NULL 8Mlns2Tl6E0g 1969-12-31 15:59:44.823 1969-12-31 15:59:58.174 NULL false -61 -7196 NULL NULL -61.0 -7196.0 NULL fUJIN 1969-12-31 16:00:11.842 1969-12-31 15:59:58.174 NULL false -62 -7196 NULL NULL -62.0 -7196.0 NULL jf1Cw6qhkNToQuud 1969-12-31 16:00:12.388 1969-12-31 15:59:58.174 NULL false -62 -7196 NULL NULL -62.0 -7196.0 NULL yLiOchx5PfDTFdcMduBTg 1969-12-31 16:00:02.373 1969-12-31 15:59:58.174 NULL false +-64 -10462 626923679 NULL -64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL +-64 -15920 528534767 NULL -64.0 -15920.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:51.859 1969-12-31 16:00:14.468 true NULL +-64 -6907 253665376 NULL -64.0 -6907.0 1cGVWH7n1QU NULL NULL 1969-12-31 15:59:53.66 true NULL -64 -7196 NULL -1615920595 -64.0 -7196.0 NULL X5rDjl 1969-12-31 16:00:11.912 1969-12-31 15:59:58.174 NULL false -64 -7196 NULL -1639157869 -64.0 -7196.0 NULL IJ0Oj7qAiqNGsN7gn 1969-12-31 16:00:01.785 1969-12-31 15:59:58.174 NULL false -64 -7196 NULL -527203677 -64.0 -7196.0 NULL JBE4H5RoK412Cs260I72 1969-12-31 15:59:50.184 1969-12-31 15:59:58.174 NULL true -64 -7196 NULL 406535485 -64.0 -7196.0 NULL E011i 1969-12-31 15:59:56.048 1969-12-31 15:59:58.174 NULL false -64 -7196 NULL 658026952 -64.0 -7196.0 NULL 4tAur 1969-12-31 15:59:53.866 1969-12-31 15:59:58.174 NULL true -NULL -13166 626923679 NULL 
NULL -13166.0 821UdmGbkEf4j NULL 1969-12-31 15:59:55.089 1969-12-31 16:00:15.69 true NULL -NULL -14426 626923679 NULL NULL -14426.0 821UdmGbkEf4j NULL 1969-12-31 16:00:11.505 1969-12-31 16:00:13.309 true NULL -NULL -14847 626923679 NULL NULL -14847.0 821UdmGbkEf4j NULL 1969-12-31 16:00:00.612 1969-12-31 15:59:43.704 true NULL -NULL -15632 528534767 NULL NULL -15632.0 cvLH6Eat2yFsyy7p NULL NULL 1969-12-31 15:59:53.593 true NULL -NULL -15830 253665376 NULL NULL -15830.0 1cGVWH7n1QU NULL 1969-12-31 16:00:02.582 1969-12-31 16:00:00.518 true NULL -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +-64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL +-64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL PREHOOK: query: explain vectorization detail select count(*), sum(t1.c_cbigint) from (select c.cbigint as c_cbigint from small_alltypesorc_a_n0 c @@ -267,11 +267,11 @@ STAGE PLANS: $hdt$_1:cd TableScan alias: cd - Statistics: Num rows: 20 Data size: 4182 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 20 Data size: 4274 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: cint (type: int) outputColumnNames: _col0 - Statistics: Num rows: 20 Data size: 4182 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 20 Data size: 4274 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col0 (type: int) @@ -279,11 +279,11 @@ STAGE PLANS: $hdt$_2:hd TableScan alias: hd - Statistics: Num rows: 20 Data size: 4182 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 20 Data size: 4274 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: cbigint (type: bigint) outputColumnNames: _col0 - Statistics: Num rows: 20 Data size: 4182 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 20 Data size: 4274 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col1 (type: bigint) @@ -294,7 +294,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: c - Statistics: Num rows: 20 Data size: 4182 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 20 Data size: 4274 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true vectorizationSchemaColumns: [0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct] @@ -305,7 +305,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [2, 3] - Statistics: Num rows: 20 Data size: 4182 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 20 Data size: 4274 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join 0 to 1 @@ -320,7 +320,7 @@ STAGE PLANS: nativeConditionsMet: 
hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false outputColumnNames: _col1 - Statistics: Num rows: 22 Data size: 4600 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 4701 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join 0 to 1 @@ -335,7 +335,7 @@ STAGE PLANS: nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false outputColumnNames: _col1 - Statistics: Num rows: 24 Data size: 5060 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 24 Data size: 5171 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(), sum(_col1) Group By Vectorization: @@ -419,4 +419,4 @@ left outer join small_alltypesorc_a_n0 hd POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc_a_n0 #### A masked pattern was here #### -34 -26289186744 +24 -3110813706 diff --git ql/src/test/results/clientpositive/vector_outer_join3.q.out ql/src/test/results/clientpositive/vector_outer_join3.q.out index 1f1c1314fe..6cae3572a9 100644 --- ql/src/test/results/clientpositive/vector_outer_join3.q.out +++ ql/src/test/results/clientpositive/vector_outer_join3.q.out @@ -94,11 +94,11 @@ POSTHOOK: query: select * from small_alltypesorc1a_n1 POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc1a_n1 #### A masked pattern was here #### -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +-64 -10462 626923679 NULL -64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL +-64 -15920 528534767 NULL -64.0 -15920.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:51.859 1969-12-31 16:00:14.468 true NULL +-64 -6907 253665376 NULL -64.0 -6907.0 1cGVWH7n1QU NULL NULL 1969-12-31 15:59:53.66 true NULL +-64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL +-64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL PREHOOK: query: select * from small_alltypesorc2a_n1 PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc2a_n1 @@ -216,16 +216,16 @@ POSTHOOK: Input: default@small_alltypesorc_a_n1 -51 NULL NULL -1874052220 -51.0 NULL c61B47I604gymFJ sjWQS78 1969-12-31 16:00:08.451 NULL false false -51 NULL NULL -1927203921 -51.0 NULL 45ja5suO 42S0I0 1969-12-31 16:00:08.451 NULL true true -51 NULL NULL -1970551565 -51.0 NULL r2uhJH3 loXMWyrHjVeK 1969-12-31 16:00:08.451 
NULL false false +-64 -10462 626923679 NULL -64.0 -10462.0 821UdmGbkEf4j NULL 1969-12-31 16:00:02.496 1969-12-31 16:00:00.164 true NULL +-64 -15920 528534767 NULL -64.0 -15920.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:51.859 1969-12-31 16:00:14.468 true NULL +-64 -6907 253665376 NULL -64.0 -6907.0 1cGVWH7n1QU NULL NULL 1969-12-31 15:59:53.66 true NULL -64 -7196 NULL -1615920595 -64.0 -7196.0 NULL X5rDjl 1969-12-31 16:00:11.912 1969-12-31 15:59:58.174 NULL false -64 -7196 NULL -1639157869 -64.0 -7196.0 NULL IJ0Oj7qAiqNGsN7gn 1969-12-31 16:00:01.785 1969-12-31 15:59:58.174 NULL false -64 -7196 NULL -527203677 -64.0 -7196.0 NULL JBE4H5RoK412Cs260I72 1969-12-31 15:59:50.184 1969-12-31 15:59:58.174 NULL true -64 -7196 NULL 406535485 -64.0 -7196.0 NULL E011i 1969-12-31 15:59:56.048 1969-12-31 15:59:58.174 NULL false -64 -7196 NULL 658026952 -64.0 -7196.0 NULL 4tAur 1969-12-31 15:59:53.866 1969-12-31 15:59:58.174 NULL true -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +-64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL +-64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL PREHOOK: query: explain vectorization detail formatted select count(*) from (select c.cstring1 from small_alltypesorc_a_n1 c @@ -244,7 +244,7 @@ left outer join small_alltypesorc_a_n1 hd on hd.cstring1 = c.cstring1 ) t1 POSTHOOK: type: QUERY -{"optimizedSQL":"SELECT COUNT(*) AS `$f0`\nFROM (SELECT `cint`, `cstring1`\nFROM `default`.`small_alltypesorc_a_n1`) AS `t`\nLEFT JOIN (SELECT `cint`\nFROM `default`.`small_alltypesorc_a_n1`) AS `t0` ON `t`.`cint` = `t0`.`cint`\nLEFT JOIN (SELECT `cstring1`\nFROM `default`.`small_alltypesorc_a_n1`) AS `t1` ON `t`.`cstring1` = `t1`.`cstring1`","PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"cboInfo":"Plan optimized by CBO.","STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cint"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cint (type: int)","columnExprMap:":{"_col0":"cint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: int)","1":"_col0 (type: 
int)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cstring1 (type: string)","columnExprMap:":{"_col0":"cstring1"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cint","cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cint (type: int), cstring1 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cstring1"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 6]"},"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col1":"0:_col1"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: int)","1":"_col0 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col1"],"Statistics:":"Num rows: 22 Data size: 4840 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 5324 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> 
bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[2, 6]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} +{"optimizedSQL":"SELECT COUNT(*) AS `$f0`\nFROM (SELECT `cint`, `cstring1`\nFROM `default`.`small_alltypesorc_a_n1`) AS `t`\nLEFT JOIN (SELECT `cint`\nFROM `default`.`small_alltypesorc_a_n1`) AS `t0` ON `t`.`cint` = `t0`.`cint`\nLEFT JOIN (SELECT `cstring1`\nFROM `default`.`small_alltypesorc_a_n1`) AS `t1` ON `t`.`cstring1` = `t1`.`cstring1`","PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"cboInfo":"Plan optimized by CBO.","STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator 
Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cint"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4085 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cint (type: int)","columnExprMap:":{"_col0":"cint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4085 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: int)","1":"_col0 (type: int)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4085 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cstring1 (type: string)","columnExprMap:":{"_col0":"cstring1"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4085 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cint","cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4085 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cint (type: int), cstring1 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cstring1"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 6]"},"Statistics:":"Num rows: 20 Data size: 4085 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col1":"0:_col1"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: int)","1":"_col0 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col1"],"Statistics:":"Num rows: 22 Data size: 4493 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS 
true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 4942 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[2, 6]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} PREHOOK: query: select count(*) from (select c.cstring1 from small_alltypesorc_a_n1 c left outer join small_alltypesorc_a_n1 cd @@ -265,7 +265,7 @@ left outer join small_alltypesorc_a_n1 hd POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc_a_n1 #### A masked pattern was here #### -20 +32 PREHOOK: query: 
explain vectorization detail formatted select count(*) from (select c.cstring1 from small_alltypesorc_a_n1 c @@ -284,7 +284,7 @@ left outer join small_alltypesorc_a_n1 hd on hd.cstring1 = c.cstring1 ) t1 POSTHOOK: type: QUERY -{"optimizedSQL":"SELECT COUNT(*) AS `$f0`\nFROM (SELECT `cstring1`, `cstring2`\nFROM `default`.`small_alltypesorc_a_n1`) AS `t`\nLEFT JOIN (SELECT `cstring2`\nFROM `default`.`small_alltypesorc_a_n1`) AS `t0` ON `t`.`cstring2` = `t0`.`cstring2`\nLEFT JOIN (SELECT `cstring1`\nFROM `default`.`small_alltypesorc_a_n1`) AS `t1` ON `t`.`cstring1` = `t1`.`cstring1`","PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"cboInfo":"Plan optimized by CBO.","STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cstring2 (type: string)","columnExprMap:":{"_col0":"cstring2"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cstring1 (type: string)","columnExprMap:":{"_col0":"cstring1"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cstring1","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cstring1 (type: string), cstring2 (type: string)","columnExprMap:":{"_col0":"cstring1","_col1":"cstring2"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[6, 7]"},"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 
1"}],"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 7:string"],"bigTableValueExpressions:":["col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 22 Data size: 4840 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 5324 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[6, 7]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine 
mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} +{"optimizedSQL":"SELECT COUNT(*) AS `$f0`\nFROM (SELECT `cstring1`, `cstring2`\nFROM `default`.`small_alltypesorc_a_n1`) AS `t`\nLEFT JOIN (SELECT `cstring2`\nFROM `default`.`small_alltypesorc_a_n1`) AS `t0` ON `t`.`cstring2` = `t0`.`cstring2`\nLEFT JOIN (SELECT `cstring1`\nFROM `default`.`small_alltypesorc_a_n1`) AS `t1` ON `t`.`cstring1` = `t1`.`cstring1`","PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"cboInfo":"Plan optimized by CBO.","STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4085 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cstring2 (type: string)","columnExprMap:":{"_col0":"cstring2"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4085 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4085 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cstring1 (type: string)","columnExprMap:":{"_col0":"cstring1"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 20 Data size: 4085 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: string)","1":"_col0 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cstring1","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4085 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 
12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cstring1 (type: string), cstring2 (type: string)","columnExprMap:":{"_col0":"cstring1","_col1":"cstring2"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[6, 7]"},"Statistics:":"Num rows: 20 Data size: 4085 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 7:string"],"bigTableValueExpressions:":["col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 22 Data size: 4493 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: string)","1":"_col0 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 4942 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS 
true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[6, 7]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} PREHOOK: query: select count(*) from (select c.cstring1 from small_alltypesorc_a_n1 c left outer join small_alltypesorc_a_n1 cd @@ -305,7 +305,7 @@ left outer join small_alltypesorc_a_n1 hd POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc_a_n1 #### A masked pattern was here #### -28 +24 PREHOOK: query: explain vectorization detail formatted select count(*) from (select c.cstring1 from small_alltypesorc_a_n1 c @@ -324,7 +324,7 @@ left outer join small_alltypesorc_a_n1 hd on hd.cstring1 = c.cstring1 and hd.cint = c.cint ) t1 POSTHOOK: type: QUERY -{"optimizedSQL":"SELECT COUNT(*) AS `$f0`\nFROM (SELECT `cint`, `cbigint`, `cstring1`, `cstring2`\nFROM `default`.`small_alltypesorc_a_n1`) AS `t`\nLEFT JOIN (SELECT `cbigint`, `cstring2`\nFROM `default`.`small_alltypesorc_a_n1`) AS `t0` ON `t`.`cstring2` = `t0`.`cstring2` AND `t`.`cbigint` = `t0`.`cbigint`\nLEFT JOIN (SELECT `cint`, `cstring1`\nFROM `default`.`small_alltypesorc_a_n1`) AS `t1` ON `t`.`cstring1` = `t1`.`cstring1` AND `t`.`cint` = `t1`.`cint`","PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"cboInfo":"Plan optimized by CBO.","STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cbigint","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cbigint (type: bigint), cstring2 (type: 
string)","columnExprMap:":{"_col0":"cbigint","_col1":"cstring2"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: bigint), _col3 (type: string)","1":"_col0 (type: bigint), _col1 (type: string)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cint","cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cint (type: int), cstring1 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cstring1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: int), _col2 (type: string)","1":"_col0 (type: int), _col1 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cint","cbigint","cstring1","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cint (type: int), cbigint (type: bigint), cstring1 (type: string), cstring2 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cbigint","_col2":"cstring1","_col3":"cstring2"},"outputColumnNames:":["_col0","_col1","_col2","_col3"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 3, 6, 7]"},"Statistics:":"Num rows: 20 Data size: 4400 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col2":"0:_col2"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: bigint), _col3 (type: string)","1":"_col0 (type: bigint), _col1 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 3:bigint","col 7:string"],"bigTableValueExpressions:":["col 2:int","col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col2"],"Statistics:":"Num rows: 22 Data size: 4840 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: int), _col2 (type: string)","1":"_col0 (type: int), _col1 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:int","col 
1:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 5324 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[2, 3, 6, 7]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} +{"optimizedSQL":"SELECT COUNT(*) AS `$f0`\nFROM (SELECT `cint`, `cbigint`, `cstring1`, `cstring2`\nFROM `default`.`small_alltypesorc_a_n1`) AS `t`\nLEFT JOIN (SELECT `cbigint`, 
`cstring2`\nFROM `default`.`small_alltypesorc_a_n1`) AS `t0` ON `t`.`cstring2` = `t0`.`cstring2` AND `t`.`cbigint` = `t0`.`cbigint`\nLEFT JOIN (SELECT `cint`, `cstring1`\nFROM `default`.`small_alltypesorc_a_n1`) AS `t1` ON `t`.`cstring1` = `t1`.`cstring1` AND `t`.`cint` = `t1`.`cint`","PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"cboInfo":"Plan optimized by CBO.","STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cbigint","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4085 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cbigint (type: bigint), cstring2 (type: string)","columnExprMap:":{"_col0":"cbigint","_col1":"cstring2"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 20 Data size: 4085 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: bigint), _col3 (type: string)","1":"_col0 (type: bigint), _col1 (type: string)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["cint","cstring1"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4085 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"cint (type: int), cstring1 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cstring1"},"outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 20 Data size: 4085 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: int), _col2 (type: string)","1":"_col0 (type: int), _col1 (type: string)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["cint","cbigint","cstring1","cstring2"],"database:":"default","Statistics:":"Num rows: 20 Data size: 4085 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_a_n1","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"cint (type: int), cbigint (type: bigint), cstring1 (type: string), cstring2 (type: string)","columnExprMap:":{"_col0":"cint","_col1":"cbigint","_col2":"cstring1","_col3":"cstring2"},"outputColumnNames:":["_col0","_col1","_col2","_col3"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[2, 3, 6, 7]"},"Statistics:":"Num rows: 20 Data size: 4085 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col2":"0:_col2"},"condition map:":[{"":"Left Outer Join 0 to 
1"}],"keys:":{"0":"_col1 (type: bigint), _col3 (type: string)","1":"_col0 (type: bigint), _col1 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 3:bigint","col 7:string"],"bigTableValueExpressions:":["col 2:int","col 6:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col2"],"Statistics:":"Num rows: 22 Data size: 4493 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: int), _col2 (type: string)","1":"_col0 (type: int), _col1 (type: string)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:int","col 1:string"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 24 Data size: 4942 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[2, 3, 6, 7]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce 
Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} PREHOOK: query: select count(*) from (select c.cstring1 from small_alltypesorc_a_n1 c left outer join small_alltypesorc_a_n1 cd @@ -345,4 +345,4 @@ left outer join small_alltypesorc_a_n1 hd POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc_a_n1 #### A masked pattern was here #### -28 +24 diff --git ql/src/test/results/clientpositive/vector_outer_join4.q.out ql/src/test/results/clientpositive/vector_outer_join4.q.out index 2826698b0c..adbb75910d 100644 --- ql/src/test/results/clientpositive/vector_outer_join4.q.out +++ ql/src/test/results/clientpositive/vector_outer_join4.q.out @@ -130,16 +130,16 @@ POSTHOOK: query: select * from small_alltypesorc3b POSTHOOK: type: QUERY POSTHOOK: Input: default@small_alltypesorc3b #### A masked pattern was here #### -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -609074876 -1887561756 NULL NULL EcM71 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:55.061 true false -NULL NULL -700300206 -1887561756 NULL NULL kdqQE010 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:58.384 false false -NULL NULL -726473298 1864027286 NULL NULL OFy1a1xf37f75b5N 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:11.799 true true -NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.55 true false -NULL NULL -838810013 1864027286 NULL NULL N016jPED08o 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:44.252 false true -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +NULL -16269 -378213344 -1645852809 NULL -16269.0 sOdj1Tmvbl03f xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:15.867 false false +NULL -16274 -671342269 -1645852809 NULL -16274.0 3DE7EQo4KyT0hS xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:51.469 false false +NULL -16296 -146635689 -1645852809 NULL -16296.0 r251rbt884txX2MNq4MM14 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:44.809 false false +NULL -16296 593429004 -1887561756 NULL -16296.0 dhDYJ076SFcC 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:47.422 false false +NULL -16300 -860437234 -1645852809 NULL -16300.0 Fb2W1r24opqN8m6571p xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:45.815 true false +NULL -16306 
384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false +NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false +NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false +NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true +NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true PREHOOK: query: select * from small_alltypesorc4b PREHOOK: type: QUERY PREHOOK: Input: default@small_alltypesorc4b @@ -236,16 +236,16 @@ POSTHOOK: Input: default@small_alltypesorc_b -64 -7196 NULL 658026952 -64.0 -7196.0 NULL 4tAur 1969-12-31 15:59:53.866 1969-12-31 15:59:58.174 NULL true -64 -8080 528534767 NULL -64.0 -8080.0 cvLH6Eat2yFsyy7p NULL 1969-12-31 15:59:58.044 1969-12-31 15:59:48.655 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -609074876 -1887561756 NULL NULL EcM71 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:55.061 true false -NULL NULL -700300206 -1887561756 NULL NULL kdqQE010 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:58.384 false false -NULL NULL -726473298 1864027286 NULL NULL OFy1a1xf37f75b5N 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:11.799 true true -NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.55 true false -NULL NULL -838810013 1864027286 NULL NULL N016jPED08o 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:44.252 false true -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +NULL -16269 -378213344 -1645852809 NULL -16269.0 sOdj1Tmvbl03f xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:15.867 false false +NULL -16274 -671342269 -1645852809 NULL -16274.0 3DE7EQo4KyT0hS xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:51.469 false false +NULL -16296 -146635689 -1645852809 NULL -16296.0 r251rbt884txX2MNq4MM14 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:44.809 false false +NULL -16296 593429004 -1887561756 NULL -16296.0 dhDYJ076SFcC 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:47.422 false false +NULL -16300 -860437234 -1645852809 NULL -16300.0 Fb2W1r24opqN8m6571p xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:45.815 true false +NULL -16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false +NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false +NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false +NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true +NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 
15:59:47.059 true true PREHOOK: query: explain vectorization detail formatted select * from small_alltypesorc_b c @@ -258,7 +258,7 @@ from small_alltypesorc_b c left outer join small_alltypesorc_b cd on cd.cint = c.cint POSTHOOK: type: QUERY -{"optimizedSQL":"SELECT *\nFROM (SELECT `ctinyint`, `csmallint`, `cint`, `cbigint`, `cfloat`, `cdouble`, `cstring1`, `cstring2`, `ctimestamp1`, `ctimestamp2`, `cboolean1`, `cboolean2`\nFROM `default`.`small_alltypesorc_b`) AS `t`\nLEFT JOIN (SELECT `ctinyint`, `csmallint`, `cint`, `cbigint`, `cfloat`, `cdouble`, `cstring1`, `cstring2`, `ctimestamp1`, `ctimestamp2`, `cboolean1`, `cboolean2`\nFROM `default`.`small_alltypesorc_b`) AS `t0` ON `t`.`cint` = `t0`.`cint`","PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"cboInfo":"Plan optimized by CBO.","STAGE DEPENDENCIES":{"Stage-4":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-4"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-4":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["ctinyint","csmallint","cint","cbigint","cfloat","cdouble","cstring1","cstring2","ctimestamp1","ctimestamp2","cboolean1","cboolean2"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean)","columnExprMap:":{"_col0":"ctinyint","_col1":"csmallint","_col10":"cboolean1","_col11":"cboolean2","_col2":"cint","_col3":"cbigint","_col4":"cfloat","_col5":"cdouble","_col6":"cstring1","_col7":"cstring2","_col8":"ctimestamp1","_col9":"ctimestamp2"},"outputColumnNames:":["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col2 (type: int)","1":"_col2 (type: int)"},"OperatorId:":"HASHTABLESINK_10"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["ctinyint","csmallint","cint","cbigint","cfloat","cdouble","cstring1","cstring2","ctimestamp1","ctimestamp2","cboolean1","cboolean2"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 
(type: boolean), cboolean2 (type: boolean)","columnExprMap:":{"_col0":"ctinyint","_col1":"csmallint","_col10":"cboolean1","_col11":"cboolean2","_col2":"cint","_col3":"cbigint","_col4":"cfloat","_col5":"cdouble","_col6":"cstring1","_col7":"cstring2","_col8":"ctimestamp1","_col9":"ctimestamp2"},"outputColumnNames:":["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]"},"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_12","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1","_col10":"0:_col10","_col11":"0:_col11","_col12":"1:_col0","_col13":"1:_col1","_col14":"1:_col2","_col15":"1:_col3","_col16":"1:_col4","_col17":"1:_col5","_col18":"1:_col6","_col19":"1:_col7","_col2":"0:_col2","_col20":"1:_col8","_col21":"1:_col9","_col22":"1:_col10","_col23":"1:_col11","_col3":"0:_col3","_col4":"0:_col4","_col5":"0:_col5","_col6":"0:_col6","_col7":"0:_col7","_col8":"0:_col8","_col9":"0:_col9"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col2 (type: int)","1":"_col2 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 0:tinyint","col 1:smallint","col 2:int","col 3:bigint","col 4:float","col 5:double","col 6:string","col 7:string","col 8:timestamp","col 9:timestamp","col 10:boolean","col 11:boolean"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21","_col22","_col23"],"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_13","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_14"}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 
11]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[bigint, bigint, bigint, bigint, double, double, string, string, timestamp, timestamp, bigint, bigint]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_15"}}}}}} +{"optimizedSQL":"SELECT *\nFROM (SELECT `ctinyint`, `csmallint`, `cint`, `cbigint`, `cfloat`, `cdouble`, `cstring1`, `cstring2`, `ctimestamp1`, `ctimestamp2`, `cboolean1`, `cboolean2`\nFROM `default`.`small_alltypesorc_b`) AS `t`\nLEFT JOIN (SELECT `ctinyint`, `csmallint`, `cint`, `cbigint`, `cfloat`, `cdouble`, `cstring1`, `cstring2`, `ctimestamp1`, `ctimestamp2`, `cboolean1`, `cboolean2`\nFROM `default`.`small_alltypesorc_b`) AS `t0` ON `t`.`cint` = `t0`.`cint`","PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"cboInfo":"Plan optimized by CBO.","STAGE DEPENDENCIES":{"Stage-4":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-4"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-4":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["ctinyint","csmallint","cint","cbigint","cfloat","cdouble","cstring1","cstring2","ctimestamp1","ctimestamp2","cboolean1","cboolean2"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6800 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean)","columnExprMap:":{"_col0":"ctinyint","_col1":"csmallint","_col10":"cboolean1","_col11":"cboolean2","_col2":"cint","_col3":"cbigint","_col4":"cfloat","_col5":"cdouble","_col6":"cstring1","_col7":"cstring2","_col8":"ctimestamp1","_col9":"ctimestamp2"},"outputColumnNames:":["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11"],"Statistics:":"Num rows: 30 Data size: 6800 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col2 (type: int)","1":"_col2 (type: int)"},"OperatorId:":"HASHTABLESINK_10"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["ctinyint","csmallint","cint","cbigint","cfloat","cdouble","cstring1","cstring2","ctimestamp1","ctimestamp2","cboolean1","cboolean2"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6800 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 
12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean)","columnExprMap:":{"_col0":"ctinyint","_col1":"csmallint","_col10":"cboolean1","_col11":"cboolean2","_col2":"cint","_col3":"cbigint","_col4":"cfloat","_col5":"cdouble","_col6":"cstring1","_col7":"cstring2","_col8":"ctimestamp1","_col9":"ctimestamp2"},"outputColumnNames:":["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]"},"Statistics:":"Num rows: 30 Data size: 6800 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_12","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0","_col1":"0:_col1","_col10":"0:_col10","_col11":"0:_col11","_col12":"1:_col0","_col13":"1:_col1","_col14":"1:_col2","_col15":"1:_col3","_col16":"1:_col4","_col17":"1:_col5","_col18":"1:_col6","_col19":"1:_col7","_col2":"0:_col2","_col20":"1:_col8","_col21":"1:_col9","_col22":"1:_col10","_col23":"1:_col11","_col3":"0:_col3","_col4":"0:_col4","_col5":"0:_col5","_col6":"0:_col6","_col7":"0:_col7","_col8":"0:_col8","_col9":"0:_col9"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col2 (type: int)","1":"_col2 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 0:tinyint","col 1:smallint","col 2:int","col 3:bigint","col 4:float","col 5:double","col 6:string","col 7:string","col 8:timestamp","col 9:timestamp","col 10:boolean","col 11:boolean"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21","_col22","_col23"],"Statistics:":"Num rows: 33 Data size: 7480 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_13","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 33 Data size: 7480 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_14"}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS 
true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[bigint, bigint, bigint, bigint, double, double, string, string, timestamp, timestamp, bigint, bigint]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_15"}}}}}} PREHOOK: query: select * from small_alltypesorc_b c left outer join small_alltypesorc_b cd @@ -317,16 +317,16 @@ POSTHOOK: Input: default@small_alltypesorc_b -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -64 -3097 253665376 NULL -64.0 -3097.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.013 1969-12-31 16:00:06.097 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -64 -6907 253665376 NULL -64.0 -6907.0 1cGVWH7n1QU NULL NULL 1969-12-31 15:59:53.66 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -64 -9842 253665376 NULL -64.0 -9842.0 1cGVWH7n1QU NULL 1969-12-31 16:00:00.631 1969-12-31 16:00:01.781 true NULL -NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false NULL NULL -1015272448 -1887561756 NULL NULL jTQ68531mP 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:45.854 false false -NULL NULL -609074876 -1887561756 NULL NULL EcM71 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:55.061 true false NULL NULL -609074876 -1887561756 NULL NULL EcM71 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:55.061 true false -NULL NULL -700300206 -1887561756 NULL NULL kdqQE010 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:58.384 false false NULL NULL -700300206 -1887561756 NULL NULL kdqQE010 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:58.384 false false -NULL NULL -726473298 1864027286 NULL NULL OFy1a1xf37f75b5N 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:11.799 true true NULL NULL -726473298 1864027286 NULL NULL OFy1a1xf37f75b5N 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:11.799 true true -NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.55 true false NULL NULL -738747840 -1645852809 NULL NULL vmAT10eeE47fgH20pLi xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.55 true false -NULL NULL -838810013 1864027286 NULL NULL N016jPED08o 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:44.252 false true NULL NULL -838810013 1864027286 NULL NULL N016jPED08o 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:44.252 false true -NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false NULL NULL -850295959 -1887561756 NULL NULL WMIgGA73 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:00.348 false false -NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false NULL NULL -886426182 -1887561756 NULL NULL 0i88xYq3gx1nW4vKjp7vBp3 
4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 16:00:04.472 true false -NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false NULL NULL -899422227 -1645852809 NULL NULL 73xdw4X xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:07.395 false false -NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false NULL NULL -971543377 -1645852809 NULL NULL uN803aW xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:05.43 false false +NULL -16269 -378213344 -1645852809 NULL -16269.0 sOdj1Tmvbl03f xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:15.867 false false NULL -16269 -378213344 -1645852809 NULL -16269.0 sOdj1Tmvbl03f xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:15.867 false false +NULL -16274 -671342269 -1645852809 NULL -16274.0 3DE7EQo4KyT0hS xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:51.469 false false NULL -16274 -671342269 -1645852809 NULL -16274.0 3DE7EQo4KyT0hS xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:51.469 false false +NULL -16296 -146635689 -1645852809 NULL -16296.0 r251rbt884txX2MNq4MM14 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:44.809 false false NULL -16296 -146635689 -1645852809 NULL -16296.0 r251rbt884txX2MNq4MM14 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:44.809 false false +NULL -16296 593429004 -1887561756 NULL -16296.0 dhDYJ076SFcC 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:47.422 false false NULL -16296 593429004 -1887561756 NULL -16296.0 dhDYJ076SFcC 4hA4KQj2vD3fI6gX82220d NULL 1969-12-31 15:59:47.422 false false +NULL -16300 -860437234 -1645852809 NULL -16300.0 Fb2W1r24opqN8m6571p xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:45.815 true false NULL -16300 -860437234 -1645852809 NULL -16300.0 Fb2W1r24opqN8m6571p xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:45.815 true false +NULL -16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false NULL -16306 384405526 -1645852809 NULL -16306.0 b5SoK8 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:11.105 true false +NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false NULL -16307 559926362 -1645852809 NULL -16307.0 nA8bdtWfPPQyP2hL5 xH7445Rals48VOulSyR5F NULL 1969-12-31 15:59:58.072 false false +NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false NULL -16309 -826497289 -1645852809 NULL -16309.0 54o058c3mK6ewOQ5 xH7445Rals48VOulSyR5F NULL 1969-12-31 16:00:10.761 false false +NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true NULL -16310 206154150 1864027286 NULL -16310.0 5Hy1y6 4KWs6gw7lv2WYd66P NULL 1969-12-31 16:00:00.821 false true +NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true NULL -16379 -894716315 1864027286 NULL -16379.0 2ArdYqML3654nUjGJk3 4KWs6gw7lv2WYd66P NULL 1969-12-31 15:59:47.059 true true PREHOOK: query: explain vectorization detail formatted select c.ctinyint from small_alltypesorc_b c @@ -339,7 +339,7 @@ from small_alltypesorc_b c left outer join small_alltypesorc_b hd on hd.ctinyint = c.ctinyint POSTHOOK: type: QUERY -{"optimizedSQL":"SELECT `t`.`ctinyint`\nFROM (SELECT `ctinyint`\nFROM `default`.`small_alltypesorc_b`) AS `t`\nLEFT JOIN (SELECT `ctinyint`\nFROM `default`.`small_alltypesorc_b`) AS `t0` ON `t`.`ctinyint` = `t0`.`ctinyint`","PLAN 
VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"cboInfo":"Plan optimized by CBO.","STAGE DEPENDENCIES":{"Stage-4":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-4"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-4":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:hd":{"TableScan":{"alias:":"hd","columns:":["ctinyint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint)","columnExprMap:":{"_col0":"ctinyint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"OperatorId:":"HASHTABLESINK_10"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["ctinyint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint)","columnExprMap:":{"_col0":"ctinyint"},"outputColumnNames:":["_col0"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0]"},"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_12","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:tinyint"],"bigTableValueExpressions:":["col 0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_13","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_14"}}}}}}}}],"Execution mode:":"vectorized","Map 
Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[0]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_15"}}}}}} +{"optimizedSQL":"SELECT `t`.`ctinyint`\nFROM (SELECT `ctinyint`\nFROM `default`.`small_alltypesorc_b`) AS `t`\nLEFT JOIN (SELECT `ctinyint`\nFROM `default`.`small_alltypesorc_b`) AS `t0` ON `t`.`ctinyint` = `t0`.`ctinyint`","PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"cboInfo":"Plan optimized by CBO.","STAGE DEPENDENCIES":{"Stage-4":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-4"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-4":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:hd":{"TableScan":{"alias:":"hd","columns:":["ctinyint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6800 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint)","columnExprMap:":{"_col0":"ctinyint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6800 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"OperatorId:":"HASHTABLESINK_10"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["ctinyint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6800 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint)","columnExprMap:":{"_col0":"ctinyint"},"outputColumnNames:":["_col0"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0]"},"Statistics:":"Num rows: 30 Data size: 6800 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_12","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:tinyint"],"bigTableValueExpressions:":["col 
0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 33 Data size: 7480 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_13","children":{"File Output Operator":{"compressed:":"false","File Sink Vectorization:":{"className:":"VectorFileSinkOperator","native:":"false"},"Statistics:":"Num rows: 33 Data size: 7480 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_14"}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[0]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_15"}}}}}} PREHOOK: query: select c.ctinyint from small_alltypesorc_b c left outer join small_alltypesorc_b hd @@ -782,7 +782,7 @@ left outer join small_alltypesorc_b hd on hd.ctinyint = c.ctinyint ) t1 POSTHOOK: type: QUERY -{"optimizedSQL":"SELECT COUNT(*) AS `$f0`\nFROM (SELECT `ctinyint`, `cint`\nFROM `default`.`small_alltypesorc_b`) AS `t`\nLEFT JOIN (SELECT `cint`\nFROM `default`.`small_alltypesorc_b`) AS `t0` ON `t`.`cint` = `t0`.`cint`\nLEFT JOIN (SELECT `ctinyint`\nFROM `default`.`small_alltypesorc_b`) AS `t1` ON `t`.`ctinyint` = `t1`.`ctinyint`","PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"cboInfo":"Plan optimized by CBO.","STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cint (type: int)","columnExprMap:":{"_col0":"cint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 
(type: int)","1":"_col0 (type: int)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["ctinyint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint)","columnExprMap:":{"_col0":"ctinyint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["ctinyint","cint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint), cint (type: int)","columnExprMap:":{"_col0":"ctinyint","_col1":"cint"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 2]"},"Statistics:":"Num rows: 30 Data size: 6680 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: int)","1":"_col0 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 33 Data size: 7348 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 36 Data size: 8082 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> 
bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[0, 2]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} +{"optimizedSQL":"SELECT COUNT(*) AS `$f0`\nFROM (SELECT `ctinyint`, `cint`\nFROM `default`.`small_alltypesorc_b`) AS `t`\nLEFT JOIN (SELECT `cint`\nFROM `default`.`small_alltypesorc_b`) AS `t0` ON `t`.`cint` = `t0`.`cint`\nLEFT JOIN (SELECT `ctinyint`\nFROM `default`.`small_alltypesorc_b`) AS `t1` ON `t`.`ctinyint` = `t1`.`ctinyint`","PLAN VECTORIZATION":{"enabled":true,"enabledConditionsMet":["hive.vectorized.execution.enabled IS true"]},"cboInfo":"Plan optimized by CBO.","STAGE DEPENDENCIES":{"Stage-8":{"ROOT STAGE":"TRUE"},"Stage-3":{"DEPENDENT STAGES":"Stage-8"},"Stage-0":{"DEPENDENT STAGES":"Stage-3"}},"STAGE PLANS":{"Stage-8":{"Map Reduce Local Work":{"Alias -> Map Local Tables:":{"$hdt$_1:cd":{"Fetch Operator":{"limit:":"-1"}},"$hdt$_2:hd":{"Fetch Operator":{"limit:":"-1"}}},"Alias -> Map Local Operator 
Tree:":{"$hdt$_1:cd":{"TableScan":{"alias:":"cd","columns:":["cint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6800 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_2","children":{"Select Operator":{"expressions:":"cint (type: int)","columnExprMap:":{"_col0":"cint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6800 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_3","children":{"HashTable Sink Operator":{"keys:":{"0":"_col1 (type: int)","1":"_col0 (type: int)"},"OperatorId:":"HASHTABLESINK_26"}}}}}},"$hdt$_2:hd":{"TableScan":{"alias:":"hd","columns:":["ctinyint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6800 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","isTempTable:":"false","OperatorId:":"TS_4","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint)","columnExprMap:":{"_col0":"ctinyint"},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 30 Data size: 6800 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_5","children":{"HashTable Sink Operator":{"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"OperatorId:":"HASHTABLESINK_24"}}}}}}}}},"Stage-3":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"c","columns:":["ctinyint","cint"],"database:":"default","Statistics:":"Num rows: 30 Data size: 6800 Basic stats: COMPLETE Column stats: NONE","table:":"small_alltypesorc_b","TableScan Vectorization:":{"native:":"true","vectorizationSchemaColumns:":"[0:ctinyint:tinyint, 1:csmallint:smallint, 2:cint:int, 3:cbigint:bigint, 4:cfloat:float, 5:cdouble:double, 6:cstring1:string, 7:cstring2:string, 8:ctimestamp1:timestamp, 9:ctimestamp2:timestamp, 10:cboolean1:boolean, 11:cboolean2:boolean, 12:ROW__ID:struct]"},"isTempTable:":"false","OperatorId:":"TS_0","children":{"Select Operator":{"expressions:":"ctinyint (type: tinyint), cint (type: int)","columnExprMap:":{"_col0":"ctinyint","_col1":"cint"},"outputColumnNames:":["_col0","_col1"],"Select Vectorization:":{"className:":"VectorSelectOperator","native:":"true","projectedOutputColumnNums:":"[0, 2]"},"Statistics:":"Num rows: 30 Data size: 6800 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"SEL_28","children":{"Map Join Operator":{"columnExprMap:":{"_col0":"0:_col0"},"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col1 (type: int)","1":"_col0 (type: int)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 2:int"],"bigTableValueExpressions:":["col 0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"outputColumnNames:":["_col0"],"Statistics:":"Num rows: 33 Data size: 7480 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_29","children":{"Map Join Operator":{"condition map:":[{"":"Left Outer Join 0 to 1"}],"keys:":{"0":"_col0 (type: tinyint)","1":"_col0 (type: tinyint)"},"Map Join Vectorization:":{"bigTableKeyExpressions:":["col 0:tinyint"],"className:":"VectorMapJoinOperator","native:":"false","nativeConditionsMet:":["hive.mapjoin.optimized.hashtable IS 
true","hive.vectorized.execution.mapjoin.native.enabled IS true","One MapJoin Condition IS true","No nullsafe IS true","Small table vectorizes IS true","Outer Join has keys IS true","Optimized Table and Supports Key Types IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 36 Data size: 8228 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"MAPJOIN_30","children":{"Group By Operator":{"aggregations:":["count()"],"Group By Vectorization:":{"aggregators:":["VectorUDAFCountStar(*) -> bigint"],"className:":"VectorGroupByOperator","groupByMode:":"HASH","native:":"false","vectorProcessingMode:":"HASH","projectedOutputColumnNums:":"[0]"},"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_31","children":{"Reduce Output Operator":{"columnExprMap:":{"VALUE._col0":"_col0"},"sort order:":"","Reduce Sink Vectorization:":{"className:":"VectorReduceSinkOperator","native:":"false","nativeConditionsMet:":["hive.vectorized.execution.reducesink.new.enabled IS true","No PTF TopN IS true","No DISTINCT columns IS true","BinarySortableSerDe for keys IS true","LazyBinarySerDe for values IS true"],"nativeConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: bigint)","OperatorId:":"RS_32"}}}}}}}}}}}}],"Execution mode:":"vectorized","Map Vectorization:":{"enabled:":"true","enabledConditionsMet:":["hive.vectorized.use.vectorized.input.format IS true"],"inputFormatFeatureSupport:":"[DECIMAL_64]","featureSupportInUse:":"[DECIMAL_64]","inputFileFormats:":["org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"],"allNative:":"false","usesVectorUDFAdaptor:":"false","vectorized:":"true","rowBatchContext:":{"dataColumnCount:":"12","includeColumns:":"[0, 2]","dataColumns:":["ctinyint:tinyint","csmallint:smallint","cint:int","cbigint:bigint","cfloat:float","cdouble:double","cstring1:string","cstring2:string","ctimestamp1:timestamp","ctimestamp2:timestamp","cboolean1:boolean","cboolean2:boolean"],"partitionColumnCount:":"0","scratchColumnTypeNames:":"[]"}},"Local Work:":{"Map Reduce Local Work":{}},"Reduce Vectorization:":{"enabled:":"false","enableConditionsMet:":["hive.vectorized.execution.reduce.enabled IS true"],"enableConditionsNotMet:":["hive.execution.engine mr IN [tez, spark] IS false"]},"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","OperatorId:":"GBY_15","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"},"OperatorId:":"FS_17"}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{"OperatorId:":"LIST_SINK_33"}}}}}} PREHOOK: query: select count(*) from (select c.ctinyint from small_alltypesorc_b c left outer join small_alltypesorc_b cd diff --git ql/src/test/results/clientpositive/vector_string_concat.q.out ql/src/test/results/clientpositive/vector_string_concat.q.out index ff9fad3c00..c807d165c3 100644 --- 
ql/src/test/results/clientpositive/vector_string_concat.q.out +++ ql/src/test/results/clientpositive/vector_string_concat.q.out @@ -460,7 +460,6 @@ POSTHOOK: query: SELECT CONCAT(CONCAT(CONCAT('Quarter ',CAST(CAST((MONTH(dt) - 1 POSTHOOK: type: QUERY POSTHOOK: Input: default@vectortab2korc_n0 #### A masked pattern was here #### -NULL Quarter 1-1970 Quarter 1-1971 Quarter 1-1972 @@ -510,3 +509,4 @@ Quarter 1-2015 Quarter 1-2016 Quarter 1-2017 Quarter 1-2018 +Quarter 1-2019 diff --git ql/src/test/results/clientpositive/vectorization_13.q.out ql/src/test/results/clientpositive/vectorization_13.q.out index f9c3aeb170..d2f3448137 100644 --- ql/src/test/results/clientpositive/vectorization_13.q.out +++ ql/src/test/results/clientpositive/vectorization_13.q.out @@ -290,46 +290,46 @@ LIMIT 40 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL -55 1969-12-31 16:00:11.38 -55.0 NULL 55 -55 0 -55.0 -0.0 55.0 -4375.415 0.0 55.0 0.0 -10.175 -55.0 0.47781818181818186 -55.0 0.0 -55 -NULL -55 1969-12-31 16:00:11.751 -55.0 NULL 55 -55 0 -55.0 -0.0 55.0 -4375.415 0.0 55.0 0.0 -10.175 -55.0 0.47781818181818186 -55.0 0.0 -55 -NULL -56 1969-12-31 16:00:13.602 -56.0 NULL 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 -NULL -56 1969-12-31 16:00:13.958 -56.0 NULL 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 -NULL -56 1969-12-31 16:00:15.038 -56.0 NULL 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 -NULL -57 1969-12-31 16:00:11.451 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 -NULL -57 1969-12-31 16:00:11.883 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 -NULL -57 1969-12-31 16:00:12.626 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 -NULL -57 1969-12-31 16:00:13.578 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 -NULL -57 1969-12-31 16:00:15.39 -57.0 NULL 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 -NULL -58 1969-12-31 16:00:12.065 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 -NULL -58 1969-12-31 16:00:12.683 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 -NULL -58 1969-12-31 16:00:12.948 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 -NULL -58 1969-12-31 16:00:14.066 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 -NULL -58 1969-12-31 16:00:15.658 -58.0 NULL 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 -NULL -59 1969-12-31 16:00:12.008 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 -NULL -59 1969-12-31 16:00:13.15 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 -NULL -59 1969-12-31 16:00:13.625 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 -NULL -59 1969-12-31 16:00:15.296 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 
-59 -NULL -59 1969-12-31 16:00:15.861 -59.0 NULL 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 -NULL -60 1969-12-31 16:00:11.504 -60.0 NULL 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 -NULL -60 1969-12-31 16:00:11.641 -60.0 NULL 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 -NULL -60 1969-12-31 16:00:11.996 -60.0 NULL 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 -NULL -60 1969-12-31 16:00:12.779 -60.0 NULL 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 -NULL -61 1969-12-31 16:00:11.842 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:12.454 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:14.192 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:16.558 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -62 1969-12-31 16:00:12.388 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:12.591 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.154 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.247 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.517 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.965 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -63 1969-12-31 16:00:11.946 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:12.188 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:15.436 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -64 1969-12-31 16:00:11.912 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:12.339 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:13.274 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -55 1969-12-31 16:00:12.297 -55.0 1cGVWH7n1QU 55 -55 0 -55.0 -0.0 55.0 -4375.415 0.0 55.0 0.0 -10.175 -55.0 0.47781818181818186 -55.0 0.0 -55 +true -55 1969-12-31 16:00:13.15 -55.0 1cGVWH7n1QU 55 -55 0 -55.0 -0.0 55.0 -4375.415 0.0 55.0 0.0 -10.175 -55.0 0.47781818181818186 -55.0 0.0 -55 +true -56 1969-12-31 16:00:11.242 -56.0 cvLH6Eat2yFsyy7p 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 +true -56 1969-12-31 16:00:13.534 -56.0 1cGVWH7n1QU 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 +true -56 1969-12-31 16:00:14.038 -56.0 1cGVWH7n1QU 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 
0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 +true -56 1969-12-31 16:00:14.689 -56.0 cvLH6Eat2yFsyy7p 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 +true -56 1969-12-31 16:00:16.37 -56.0 cvLH6Eat2yFsyy7p 56 -56 0 -56.0 -0.0 56.0 -4454.9683 0.0 56.0 0.0 -10.175 -56.0 0.4692857142857143 -56.0 0.0 -56 +true -57 1969-12-31 16:00:11.534 -57.0 cvLH6Eat2yFsyy7p 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 +true -57 1969-12-31 16:00:13.365 -57.0 1cGVWH7n1QU 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 +true -57 1969-12-31 16:00:14.225 -57.0 821UdmGbkEf4j 57 -57 0 -57.0 -0.0 57.0 -4534.521 0.0 57.0 0.0 -10.175 -57.0 0.4610526315789474 -57.0 0.0 -57 +true -58 1969-12-31 16:00:12.918 -58.0 cvLH6Eat2yFsyy7p 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 +true -58 1969-12-31 16:00:13.209 -58.0 cvLH6Eat2yFsyy7p 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 +true -58 1969-12-31 16:00:14.933 -58.0 cvLH6Eat2yFsyy7p 58 -58 0 -58.0 -0.0 58.0 -4614.074 0.0 58.0 0.0 -10.175 -58.0 0.4531034482758621 -58.0 0.0 -58 +true -59 1969-12-31 16:00:11.065 -59.0 821UdmGbkEf4j 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:11.109 -59.0 1cGVWH7n1QU 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:11.231 -59.0 821UdmGbkEf4j 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:11.758 -59.0 821UdmGbkEf4j 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:12.227 -59.0 cvLH6Eat2yFsyy7p 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:15.242 -59.0 821UdmGbkEf4j 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:15.278 -59.0 cvLH6Eat2yFsyy7p 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:16.069 -59.0 cvLH6Eat2yFsyy7p 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -59 1969-12-31 16:00:16.125 -59.0 cvLH6Eat2yFsyy7p 59 -59 0 -59.0 -0.0 59.0 -4693.627 0.0 59.0 0.0 -10.175 -59.0 0.44542372881355935 -59.0 0.0 -59 +true -60 1969-12-31 16:00:11.849 -60.0 1cGVWH7n1QU 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -60 1969-12-31 16:00:12.223 -60.0 1cGVWH7n1QU 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -60 1969-12-31 16:00:12.291 -60.0 821UdmGbkEf4j 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -60 1969-12-31 16:00:13.567 -60.0 821UdmGbkEf4j 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -60 1969-12-31 16:00:15.188 -60.0 cvLH6Eat2yFsyy7p 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -60 1969-12-31 16:00:16.165 -60.0 cvLH6Eat2yFsyy7p 60 -60 0 -60.0 -0.0 60.0 -4773.18 0.0 60.0 0.0 -10.175 -60.0 0.438 -60.0 0.0 -60 +true -61 1969-12-31 16:00:12.045 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 
0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:12.75 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:15.325 -61.0 821UdmGbkEf4j 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:15.694 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -62 1969-12-31 16:00:13.677 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:14.872 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:15.153 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -63 1969-12-31 16:00:13.752 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:14.899 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:15.827 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -64 1969-12-31 16:00:11.952 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:12.857 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, ctinyint, @@ -608,43 +608,43 @@ LIMIT 40 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL -61 1969-12-31 16:00:00.142 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:02.698 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:03.049 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:04.165 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -61 1969-12-31 16:00:04.977 -61.0 NULL 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 -NULL -62 1969-12-31 16:00:00.037 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:01.22 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:01.515 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:01.734 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:02.373 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:03.85 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:08.198 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 
-NULL -62 1969-12-31 16:00:09.025 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:09.889 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:10.069 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:10.225 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:10.485 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:12.388 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:12.591 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.154 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.247 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.517 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -62 1969-12-31 16:00:14.965 -62.0 NULL 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 -NULL -63 1969-12-31 16:00:01.843 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:03.552 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:06.852 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:07.375 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:10.205 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:11.946 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:12.188 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -63 1969-12-31 16:00:15.436 -63.0 NULL 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 -NULL -64 1969-12-31 16:00:00.199 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:00.29 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:01.785 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:03.944 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:05.997 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:10.858 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:11.912 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 
0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:12.339 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 -NULL -64 1969-12-31 16:00:13.274 -64.0 NULL 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -61 1969-12-31 16:00:00.554 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:02.339 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:02.497 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:03.742 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:07.538 -61.0 821UdmGbkEf4j 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:09.809 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:10.713 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:12.045 -61.0 1cGVWH7n1QU 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -61 1969-12-31 16:00:12.75 -61.0 cvLH6Eat2yFsyy7p 61 -61 0 -61.0 -0.0 61.0 -4852.733 0.0 61.0 0.0 -10.175 -61.0 0.4308196721311476 -61.0 0.0 -61 +true -62 1969-12-31 16:00:00.337 -62.0 1cGVWH7n1QU 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:00.659 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:00.684 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:01.419 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:02.123 -62.0 1cGVWH7n1QU 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:02.922 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:04.978 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:07.756 -62.0 1cGVWH7n1QU 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:07.847 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:07.903 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:13.677 -62.0 cvLH6Eat2yFsyy7p 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:14.872 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 0.4238709677419355 -62.0 0.0 -62 +true -62 1969-12-31 16:00:15.153 -62.0 821UdmGbkEf4j 62 -62 0 -62.0 -0.0 62.0 -4932.286 0.0 62.0 0.0 -10.175 -62.0 
0.4238709677419355 -62.0 0.0 -62 +true -63 1969-12-31 16:00:05.654 -63.0 821UdmGbkEf4j 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:07.623 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:09.14 -63.0 821UdmGbkEf4j 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:13.752 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:14.899 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -63 1969-12-31 16:00:15.827 -63.0 cvLH6Eat2yFsyy7p 63 -63 0 -63.0 -0.0 63.0 -5011.839 0.0 63.0 0.0 -10.175 -63.0 0.41714285714285715 -63.0 0.0 -63 +true -64 1969-12-31 15:59:58.959 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:00.013 -64.0 1cGVWH7n1QU 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:00.172 -64.0 1cGVWH7n1QU 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:00.631 -64.0 1cGVWH7n1QU 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:01.305 -64.0 cvLH6Eat2yFsyy7p 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:01.79 -64.0 1cGVWH7n1QU 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:02.496 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:03.088 -64.0 cvLH6Eat2yFsyy7p 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:04.662 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:10.273 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:11.952 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 +true -64 1969-12-31 16:00:12.857 -64.0 821UdmGbkEf4j 64 -64 0 -64.0 -0.0 64.0 -5091.392 0.0 64.0 0.0 -10.175 -64.0 0.410625 -64.0 0.0 -64 diff --git ql/src/test/results/clientpositive/vectorization_7.q.out ql/src/test/results/clientpositive/vectorization_7.q.out index 008d292ef8..947c758ffe 100644 --- ql/src/test/results/clientpositive/vectorization_7.q.out +++ ql/src/test/results/clientpositive/vectorization_7.q.out @@ -195,31 +195,31 @@ LIMIT 25 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -NULL -2118149242 -7196 56 1969-12-31 15:59:50.462 NULL -4236298484 0 7196 -56 -39 -15242201945432 NULL -56 0 -NULL -2121399625 -7196 27 1969-12-31 15:59:50.046 NULL -4242799250 0 7196 -27 -10 -15265591701500 NULL -27 0 -NULL -2124802690 -7196 -6 1969-12-31 15:59:57.92 NULL -4249605380 0 7196 6 23 -15290080157240 NULL 6 0 -NULL -2128720310 -7196 -52 1969-12-31 15:59:45.978 NULL -4257440620 0 7196 52 69 -15318271350760 NULL 52 0 -NULL -2132232110 -200 60 1969-12-31 15:59:47.019 NULL -4264464220 -200 200 -60 -43 -426446422000 NULL 
-60 0 -NULL -2132536965 -7196 9 1969-12-31 15:59:46 NULL -4265073930 0 7196 -9 8 -15345736000140 NULL -9 0 -NULL -2135141157 -7196 50 1969-12-31 15:59:50.192 NULL -4270282314 0 7196 -50 -33 -15364475765772 NULL -50 0 -NULL -2137537679 -7196 -25 1969-12-31 15:59:50.136 NULL -4275075358 0 7196 25 42 -15381721138084 NULL 25 0 -NULL -2145481991 -7196 56 1969-12-31 15:59:55.667 NULL -4290963982 0 7196 -56 -39 -15438888407236 NULL -56 0 -NULL NULL -200 -36 1969-12-31 15:59:57.241 NULL NULL -200 200 36 53 NULL NULL 36 0 -NULL NULL -200 -43 1969-12-31 15:59:53.783 NULL NULL -200 200 43 60 NULL NULL 43 0 -NULL NULL -200 -58 1969-12-31 15:59:51.115 NULL NULL -200 200 58 75 NULL NULL 58 0 -NULL NULL -200 22 1969-12-31 15:59:50.109 NULL NULL -200 200 -22 -5 NULL NULL -22 0 -NULL NULL -200 3 1969-12-31 15:59:50.489 NULL NULL -200 200 -3 14 NULL NULL -3 0 -NULL NULL -200 43 1969-12-31 15:59:57.003 NULL NULL -200 200 -43 -26 NULL NULL -43 0 -NULL NULL -200 53 1969-12-31 15:59:49.46 NULL NULL -200 200 -53 -36 NULL NULL -53 0 -NULL NULL -200 9 1969-12-31 15:59:44.108 NULL NULL -200 200 -9 8 NULL NULL -9 0 -NULL NULL -7196 -38 1969-12-31 15:59:53.503 NULL NULL 0 7196 38 55 NULL NULL 38 0 -NULL NULL -7196 -49 1969-12-31 15:59:51.009 NULL NULL 0 7196 49 66 NULL NULL 49 0 -NULL NULL -7196 -49 1969-12-31 15:59:52.052 NULL NULL 0 7196 49 66 NULL NULL 49 0 -NULL NULL -7196 -50 1969-12-31 15:59:52.424 NULL NULL 0 7196 50 67 NULL NULL 50 0 -NULL NULL -7196 -61 1969-12-31 15:59:44.823 NULL NULL 0 7196 61 78 NULL NULL 61 0 -NULL NULL -7196 1 1969-12-31 15:59:48.361 NULL NULL 0 7196 -1 16 NULL NULL -1 0 -NULL NULL -7196 14 1969-12-31 15:59:50.291 NULL NULL 0 7196 -14 3 NULL NULL -14 0 -NULL NULL -7196 22 1969-12-31 15:59:52.699 NULL NULL 0 7196 -22 -5 NULL NULL -22 0 +true NULL -15892 29 1969-12-31 15:59:57.937 821UdmGbkEf4j NULL -215 15892 -29 -12 NULL 171 -29 0 +true NULL -15899 50 1969-12-31 15:59:46.926 821UdmGbkEf4j NULL -222 15899 -50 -33 NULL 10210 -50 0 +true NULL -15903 -2 1969-12-31 15:59:46.371 cvLH6Eat2yFsyy7p NULL -226 15903 2 19 NULL 14465 2 0 +true NULL -15920 -64 1969-12-31 15:59:51.859 cvLH6Eat2yFsyy7p NULL -243 15920 64 81 NULL 6687 64 0 +true NULL -15922 -17 1969-12-31 15:59:46.164 821UdmGbkEf4j NULL -245 15922 17 34 NULL 10851 17 0 +true NULL -15923 49 1969-12-31 15:59:47.323 cvLH6Eat2yFsyy7p NULL -246 15923 -49 -32 NULL 2628 -49 0 +true NULL -15935 -6 1969-12-31 15:59:45.859 1cGVWH7n1QU NULL -1 15935 6 23 NULL 12046 6 0 +true NULL -15948 31 1969-12-31 15:59:47.577 821UdmGbkEf4j NULL -14 15948 -31 -14 NULL 7799 -31 0 +true NULL -15948 6 1969-12-31 15:59:49.269 1cGVWH7n1QU NULL -14 15948 -6 11 NULL 12436 -6 0 +true NULL -15980 -6 1969-12-31 15:59:54.84 1cGVWH7n1QU NULL -46 15980 6 23 NULL 14836 6 0 +true NULL -15999 4 1969-12-31 15:59:46.491 1cGVWH7n1QU NULL -65 15999 -4 13 NULL 1231 -4 0 +true NULL -16017 -21 1969-12-31 15:59:44.02 821UdmGbkEf4j NULL -83 16017 21 38 NULL 2282 21 0 +true NULL -16025 -42 1969-12-31 15:59:54.534 cvLH6Eat2yFsyy7p NULL -91 16025 42 59 NULL 14242 42 0 +true NULL -16036 -15 1969-12-31 15:59:58.681 1cGVWH7n1QU NULL -102 16036 15 32 NULL 7928 15 0 +true NULL -16059 -35 1969-12-31 15:59:53.038 821UdmGbkEf4j NULL -125 16059 35 52 NULL 12437 35 0 +true NULL -16076 59 1969-12-31 15:59:55.023 821UdmGbkEf4j NULL -142 16076 -59 -42 NULL 7907 -59 0 +true NULL -16122 50 1969-12-31 15:59:51.608 1cGVWH7n1QU NULL -188 16122 -50 -33 NULL 1828 -50 0 +true NULL -16123 -20 1969-12-31 15:59:51.177 1cGVWH7n1QU NULL -189 16123 20 37 NULL 2217 20 0 +true NULL -16153 35 1969-12-31 15:59:52.036 
1cGVWH7n1QU NULL -219 16153 -35 -18 NULL 14817 -35 0 +true NULL -16169 5 1969-12-31 15:59:45.059 1cGVWH7n1QU NULL -235 16169 -5 12 NULL 6104 -5 0 +true NULL -16207 -4 1969-12-31 15:59:45.956 cvLH6Eat2yFsyy7p NULL -16 16207 4 21 NULL 8290 4 0 +true NULL -16221 -12 1969-12-31 15:59:45.877 1cGVWH7n1QU NULL -30 16221 12 29 NULL 1378 12 0 +true NULL -16227 2 1969-12-31 15:59:44.065 821UdmGbkEf4j NULL -36 16227 -2 15 NULL 9761 -2 0 +true NULL -16305 3 1969-12-31 15:59:43.878 1cGVWH7n1QU NULL -114 16305 -3 14 NULL 8491 -3 0 +true NULL -16339 15 1969-12-31 15:59:53.966 821UdmGbkEf4j NULL -148 16339 -15 2 NULL 12588 -15 0 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT cboolean1, cbigint, diff --git ql/src/test/results/clientpositive/vectorization_limit.q.out ql/src/test/results/clientpositive/vectorization_limit.q.out index 1d5db091de..a834620235 100644 --- ql/src/test/results/clientpositive/vectorization_limit.q.out +++ ql/src/test/results/clientpositive/vectorization_limit.q.out @@ -369,6 +369,7 @@ POSTHOOK: query: select ctinyint,avg(cdouble + 1) as cavg from alltypesorc group POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### +-45 326.44444444444446 -46 3033.55 -47 -574.6428571428571 -48 1672.909090909091 @@ -388,7 +389,6 @@ POSTHOOK: Input: default@alltypesorc -62 245.69387755102042 -63 2178.7272727272725 -64 373.52941176470586 -NULL 9370.0945309795 PREHOOK: query: explain vectorization detail select distinct(ctinyint) as cdistinct from alltypesorc order by cdistinct limit 20 PREHOOK: type: QUERY @@ -495,6 +495,7 @@ POSTHOOK: query: select distinct(ctinyint) as cdistinct from alltypesorc order b POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### +-45 -46 -47 -48 @@ -514,7 +515,6 @@ POSTHOOK: Input: default@alltypesorc -62 -63 -64 -NULL PREHOOK: query: explain vectorization detail select ctinyint, count(distinct(cdouble)) as count_distinct from alltypesorc group by ctinyint order by ctinyint, count_distinct limit 20 PREHOOK: type: QUERY @@ -643,6 +643,7 @@ POSTHOOK: query: select ctinyint, count(distinct(cdouble)) as count_distinct fro POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### +-45 24 -46 24 -47 22 -48 29 @@ -662,7 +663,6 @@ POSTHOOK: Input: default@alltypesorc -62 27 -63 19 -64 24 -NULL 2932 PREHOOK: query: explain vectorization detail select ctinyint,cdouble from alltypesorc order by ctinyint,cdouble limit 0 PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/vectorization_part_project.q.out ql/src/test/results/clientpositive/vectorization_part_project.q.out index 50052fde38..44755e5ae2 100644 --- ql/src/test/results/clientpositive/vectorization_part_project.q.out +++ ql/src/test/results/clientpositive/vectorization_part_project.q.out @@ -64,15 +64,15 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_part - Statistics: Num rows: 200 Data size: 54496 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 200 Data size: 40674 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: (cdouble + 2.0D) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 200 Data size: 54496 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 200 Data size: 40674 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + - Statistics: Num rows: 200 Data size: 54496 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 200 Data 
size: 40674 Basic stats: COMPLETE Column stats: NONE
 TopN Hash Memory Usage: 0.1
 Execution mode: vectorized
 Map Vectorization:
@@ -92,13 +92,13 @@ STAGE PLANS:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: double)
 outputColumnNames: _col0
- Statistics: Num rows: 200 Data size: 54496 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 200 Data size: 40674 Basic stats: COMPLETE Column stats: NONE
 Limit
 Number of rows: 10
- Statistics: Num rows: 10 Data size: 2720 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 10 Data size: 2030 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
- Statistics: Num rows: 10 Data size: 2720 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 10 Data size: 2030 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -122,13 +122,13 @@ POSTHOOK: Input: default@alltypesorc_part
 POSTHOOK: Input: default@alltypesorc_part@ds=2011
 POSTHOOK: Input: default@alltypesorc_part@ds=2012
 #### A masked pattern was here ####
-NULL
-NULL
--15863.0
--15863.0
--14988.0
--14988.0
--14646.0
--14646.0
--14236.0
--14236.0
+-15990.0
+-15990.0
+-15918.0
+-15918.0
+-15890.0
+-15890.0
+-14305.0
+-14305.0
+-12514.0
+-12514.0
diff --git ql/src/test/results/clientpositive/vectorized_join46_mr.q.out ql/src/test/results/clientpositive/vectorized_join46_mr.q.out
new file mode 100644
index 0000000000..53c32ffcf8
--- /dev/null
+++ ql/src/test/results/clientpositive/vectorized_join46_mr.q.out
@@ -0,0 +1,2050 @@
+PREHOOK: query: CREATE TABLE test1 (key INT, value INT, col_1 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test1
+POSTHOOK: query: CREATE TABLE test1 (key INT, value INT, col_1 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test1
+PREHOOK: query: INSERT INTO test1 VALUES (NULL, NULL, 'None'), (98, NULL, 'None'),
+ (99, 0, 'Alice'), (99, 2, 'Mat'), (100, 1, 'Bob'), (101, 2, 'Car')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test1
+POSTHOOK: query: INSERT INTO test1 VALUES (NULL, NULL, 'None'), (98, NULL, 'None'),
+ (99, 0, 'Alice'), (99, 2, 'Mat'), (100, 1, 'Bob'), (101, 2, 'Car')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test1
+POSTHOOK: Lineage: test1.col_1 SCRIPT []
+POSTHOOK: Lineage: test1.key SCRIPT []
+POSTHOOK: Lineage: test1.value SCRIPT []
+col1 col2 col3
+PREHOOK: query: CREATE TABLE test2 (key INT, value INT, col_2 STRING)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test2
+POSTHOOK: query: CREATE TABLE test2 (key INT, value INT, col_2 STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test2
+PREHOOK: query: INSERT INTO test2 VALUES (102, 2, 'Del'), (103, 2, 'Ema'),
+ (104, 3, 'Fli'), (105, NULL, 'None')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@test2
+POSTHOOK: query: INSERT INTO test2 VALUES (102, 2, 'Del'), (103, 2, 'Ema'),
+ (104, 3, 'Fli'), (105, NULL, 'None')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@test2
+POSTHOOK: Lineage: test2.col_2 SCRIPT []
+POSTHOOK: Lineage: test2.key SCRIPT []
+POSTHOOK: Lineage: test2.value SCRIPT []
+col1 col2 col3
+PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+ enabled: true
+ enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+ Stage-4 is a root stage
+ Stage-3 depends on stages: Stage-4
+ Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+ Stage: Stage-4
+ Map Reduce Local Work
+ Alias -> Map Local Tables:
+ $hdt$_1:test2
+ Fetch Operator
+ limit: -1
+ Alias -> Map Local Operator Tree:
+ $hdt$_1:test2
+ TableScan
+ alias: test2
+ Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: int), value (type: int), col_2 (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+ HashTable Sink Operator
+ keys:
+ 0 _col1 (type: int)
+ 1 _col1 (type: int)
+
+ Stage: Stage-3
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: test1
+ Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+ TableScan Vectorization:
+ native: true
+ Select Operator
+ expressions: key (type: int), value (type: int), col_1 (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ Select Vectorization:
+ className: VectorSelectOperator
+ native: true
+ Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Left Outer Join 0 to 1
+ keys:
+ 0 _col1 (type: int)
+ 1 _col1 (type: int)
+ Map Join Vectorization:
+ bigTableKeyExpressions: col 1:int
+ className: VectorMapJoinOperator
+ native: false
+ nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true
+ nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ File Sink Vectorization:
+ className: VectorFileSinkOperator
+ native: false
+ Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Execution mode: vectorized
+ Map Vectorization:
+ enabled: true
+ enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+ inputFormatFeatureSupport: [DECIMAL_64]
+ featureSupportInUse: [DECIMAL_64]
+ inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+ allNative: false
+ usesVectorUDFAdaptor: false
+ vectorized: true
+ Local Work:
+ Map Reduce Local Work
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1
+PREHOOK: Input: default@test2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1
+POSTHOOK: Input: default@test2
+#### A masked pattern was here ####
+test1.key test1.value test1.col_1 test2.key test2.value test2.col_2
+100 1 Bob NULL NULL NULL
+101 2 Car 102 2 Del
+101 2 Car 103 2 Ema
+98 NULL None NULL NULL NULL
+99 0 Alice NULL NULL NULL
+99 2 Mat 102 2 Del
+99 2 Mat 103 2 Ema
+NULL NULL None NULL NULL NULL
+PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+ AND test1.key between 100 and 102
+ AND test2.key between 100 and 102)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR
+SELECT *
+FROM test1 LEFT OUTER JOIN test2
+ON (test1.value=test2.value
+ AND test1.key between 100 and 102
+ AND test2.key between 100 and 102)
+POSTHOOK: type: QUERY
+Explain
+PLAN VECTORIZATION:
+ enabled: true
+ enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+ Stage-4 is a root stage
+ Stage-3 depends on stages: Stage-4
+ Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+ Stage: Stage-4
+ Map Reduce Local Work
+ Alias -> Map Local Tables:
+ $hdt$_1:test2
+ Fetch Operator
+ limit: -1
+ Alias -> Map Local Operator Tree:
+ $hdt$_1:test2
+ TableScan
+ alias: test2
+ filterExpr: key BETWEEN 100 AND 102 (type: boolean)
+ Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key BETWEEN 100 AND 102 (type: boolean)
+ Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: int), value (type: int), col_2 (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE
+ HashTable Sink Operator
+ filter predicates:
+ 0 {_col0 BETWEEN 100 AND 102}
+ 1
+ keys:
+ 0 _col1 (type: int)
+ 1 _col1 (type: int)
+
+ Stage: Stage-3
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: test1
+ Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+ TableScan Vectorization:
+ native: true
+ Select Operator
+ expressions: key (type: int), value (type: int), col_1 (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ Select Vectorization:
+ className: VectorSelectOperator
+ native: true
+ Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE
+ Map Join Operator
+ condition map:
+ Left Outer Join 0 to 1
+ filter predicates:
+ 0 {_col0 BETWEEN 100 AND 102}
+ 1
+ keys:
+ 0 _col1 (type: int)
+ 1 _col1 (type: int)
+ Map Join Vectorization:
+ bigTableKeyExpressions: col 1:int
+ className: VectorMapJoinOuterFilteredOperator
+ native: false
+ nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true
+ nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ File Sink Vectorization:
+ className: VectorFileSinkOperator
+ native: false
+ Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Execution mode: vectorized
vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.value=test2.value + AND test1.key between 100 and 102 + AND test2.key between 100 and 102) +PREHOOK: type: QUERY +PREHOOK: Input: default@test1 +PREHOOK: Input: default@test2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.value=test2.value + AND test1.key between 100 and 102 + AND test2.key between 100 and 102) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test1 +POSTHOOK: Input: default@test2 +#### A masked pattern was here #### +test1.key test1.value test1.col_1 test2.key test2.value test2.col_2 +100 1 Bob NULL NULL NULL +101 2 Car 102 2 Del +98 NULL None NULL NULL NULL +99 0 Alice NULL NULL NULL +99 2 Mat NULL NULL NULL +NULL NULL None NULL NULL NULL +Warning: Map Join MAPJOIN[11][bigTable=?] in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.key between 100 and 102 + AND test2.key between 100 and 102) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.key between 100 and 102 + AND test2.key between 100 and 102) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_1:test2 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_1:test2 + TableScan + alias: test2 + filterExpr: key BETWEEN 100 AND 102 (type: boolean) + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key BETWEEN 100 AND 102 (type: boolean) + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + filter predicates: + 0 {_col0 BETWEEN 100 AND 102} + 1 + keys: + 0 + 1 + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: test1 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: int), value (type: int), col_1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Outer Join 0 to 1 + filter predicates: + 0 {_col0 BETWEEN 100 AND 102} + 1 + keys: + 0 + 1 + Map Join Vectorization: + className: VectorMapJoinOuterFilteredOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, 
hive.vectorized.execution.mapjoin.native.enabled IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Outer Join has keys IS false + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Map Join MAPJOIN[11][bigTable=?] in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.key between 100 and 102 + AND test2.key between 100 and 102) +PREHOOK: type: QUERY +PREHOOK: Input: default@test1 +PREHOOK: Input: default@test2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.key between 100 and 102 + AND test2.key between 100 and 102) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test1 +POSTHOOK: Input: default@test2 +#### A masked pattern was here #### +test1.key test1.value test1.col_1 test2.key test2.value test2.col_2 +100 1 Bob 102 2 Del +101 2 Car 102 2 Del +98 NULL None NULL NULL NULL +99 0 Alice NULL NULL NULL +99 2 Mat NULL NULL NULL +NULL NULL None NULL NULL NULL +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 RIGHT OUTER JOIN test2 +ON (test1.value=test2.value AND true) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 RIGHT OUTER JOIN test2 +ON (test1.value=test2.value AND true) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_0:test1 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_0:test1 + TableScan + alias: test1 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + keys: + 0 _col1 (type: int) + 1 _col1 (type: int) + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: test2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: int), value (type: int), col_2 (type: 
string) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Right Outer Join 0 to 1 + keys: + 0 _col1 (type: int) + 1 _col1 (type: int) + Map Join Vectorization: + bigTableKeyExpressions: col 1:int + className: VectorMapJoinOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Outer Join has keys IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT * +FROM test1 RIGHT OUTER JOIN test2 +ON (test1.value=test2.value AND true) +PREHOOK: type: QUERY +PREHOOK: Input: default@test1 +PREHOOK: Input: default@test2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * +FROM test1 RIGHT OUTER JOIN test2 +ON (test1.value=test2.value AND true) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test1 +POSTHOOK: Input: default@test2 +#### A masked pattern was here #### +test1.key test1.value test1.col_1 test2.key test2.value test2.col_2 +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +99 2 Mat 102 2 Del +99 2 Mat 103 2 Ema +NULL NULL NULL 104 3 Fli +NULL NULL NULL 105 NULL None +Warning: Map Join MAPJOIN[9][bigTable=?] 
in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.key between 100 and 102) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.key between 100 and 102) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_1:test2 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_1:test2 + TableScan + alias: test2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + filter predicates: + 0 {_col0 BETWEEN 100 AND 102} + 1 + keys: + 0 + 1 + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: test1 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + TableScan Vectorization: + native: true + Select Operator + expressions: key (type: int), value (type: int), col_1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Select Vectorization: + className: VectorSelectOperator + native: true + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Outer Join 0 to 1 + filter predicates: + 0 {_col0 BETWEEN 100 AND 102} + 1 + keys: + 0 + 1 + Map Join Vectorization: + className: VectorMapJoinOuterFilteredOperator + native: false + nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true + nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false, Outer Join has keys IS false + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + File Sink Vectorization: + className: VectorFileSinkOperator + native: false + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFormatFeatureSupport: [DECIMAL_64] + featureSupportInUse: [DECIMAL_64] + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + allNative: false + usesVectorUDFAdaptor: false + vectorized: true + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Map Join MAPJOIN[9][bigTable=?] 
in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.key between 100 and 102) +PREHOOK: type: QUERY +PREHOOK: Input: default@test1 +PREHOOK: Input: default@test2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.key between 100 and 102) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test1 +POSTHOOK: Input: default@test2 +#### A masked pattern was here #### +test1.key test1.value test1.col_1 test2.key test2.value test2.col_2 +100 1 Bob 102 2 Del +100 1 Bob 103 2 Ema +100 1 Bob 104 3 Fli +100 1 Bob 105 NULL None +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +101 2 Car 104 3 Fli +101 2 Car 105 NULL None +98 NULL None NULL NULL NULL +99 0 Alice NULL NULL NULL +99 2 Mat NULL NULL NULL +NULL NULL None NULL NULL NULL +Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102 + OR test2.key between 100 and 102) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102 + OR test2.key between 100 and 102) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_1:test2 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_1:test2 + TableScan + alias: test2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + keys: + 0 + 1 + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: test1 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Outer Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)} + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + notVectorizedReason: MAPJOIN operator: Non-equi joins not supported + vectorized: false + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + 
ListSink + +Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102 + OR test2.key between 100 and 102) +PREHOOK: type: QUERY +PREHOOK: Input: default@test1 +PREHOOK: Input: default@test2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102 + OR test2.key between 100 and 102) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test1 +POSTHOOK: Input: default@test2 +#### A masked pattern was here #### +test1.key test1.value test1.col_1 test2.key test2.value test2.col_2 +100 1 Bob 102 2 Del +100 1 Bob 103 2 Ema +100 1 Bob 104 3 Fli +100 1 Bob 105 NULL None +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +101 2 Car 104 3 Fli +101 2 Car 105 NULL None +98 NULL None 102 2 Del +99 0 Alice 102 2 Del +99 2 Mat 102 2 Del +99 2 Mat 103 2 Ema +NULL NULL None 102 2 Del +Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_1:test2 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_1:test2 + TableScan + alias: test2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + keys: + 0 + 1 + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: test1 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Outer Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)} + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + notVectorizedReason: MAPJOIN operator: Non-equi joins not supported + vectorized: false + Local Work: + Map Reduce 
Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102) +PREHOOK: type: QUERY +PREHOOK: Input: default@test1 +PREHOOK: Input: default@test2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test1 +POSTHOOK: Input: default@test2 +#### A masked pattern was here #### +test1.key test1.value test1.col_1 test2.key test2.value test2.col_2 +100 1 Bob 102 2 Del +100 1 Bob 103 2 Ema +100 1 Bob 104 3 Fli +100 1 Bob 105 NULL None +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +101 2 Car 104 3 Fli +101 2 Car 105 NULL None +98 NULL None NULL NULL NULL +99 0 Alice NULL NULL NULL +99 2 Mat 102 2 Del +99 2 Mat 103 2 Ema +NULL NULL None NULL NULL NULL +Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.value=test2.value + OR test2.key between 100 and 102) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.value=test2.value + OR test2.key between 100 and 102) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_1:test2 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_1:test2 + TableScan + alias: test2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + keys: + 0 + 1 + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: test1 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Outer Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)} + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + notVectorizedReason: MAPJOIN operator: Non-equi joins not supported + vectorized: false + Local 
Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.value=test2.value + OR test2.key between 100 and 102) +PREHOOK: type: QUERY +PREHOOK: Input: default@test1 +PREHOOK: Input: default@test2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.value=test2.value + OR test2.key between 100 and 102) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test1 +POSTHOOK: Input: default@test2 +#### A masked pattern was here #### +test1.key test1.value test1.col_1 test2.key test2.value test2.col_2 +100 1 Bob 102 2 Del +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +98 NULL None 102 2 Del +99 0 Alice 102 2 Del +99 2 Mat 102 2 Del +99 2 Mat 103 2 Ema +NULL NULL None 102 2 Del +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.value=test2.value + AND (test1.key between 100 and 102 + OR test2.key between 100 and 102)) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.value=test2.value + AND (test1.key between 100 and 102 + OR test2.key between 100 and 102)) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_1:test2 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_1:test2 + TableScan + alias: test2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + keys: + 0 _col1 (type: int) + 1 _col1 (type: int) + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: test1 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Left Outer Join 0 to 1 + keys: + 0 _col1 (type: int) + 1 _col1 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)} + Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + notVectorizedReason: MAPJOIN operator: Non-equi joins not supported + vectorized: false + Local Work: + Map Reduce Local Work + + Stage: 
Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.value=test2.value + AND (test1.key between 100 and 102 + OR test2.key between 100 and 102)) +PREHOOK: type: QUERY +PREHOOK: Input: default@test1 +PREHOOK: Input: default@test2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * +FROM test1 LEFT OUTER JOIN test2 +ON (test1.value=test2.value + AND (test1.key between 100 and 102 + OR test2.key between 100 and 102)) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test1 +POSTHOOK: Input: default@test2 +#### A masked pattern was here #### +test1.key test1.value test1.col_1 test2.key test2.value test2.col_2 +100 1 Bob NULL NULL NULL +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +98 NULL None NULL NULL NULL +99 0 Alice NULL NULL NULL +99 2 Mat 102 2 Del +NULL NULL None NULL NULL NULL +Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 RIGHT OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102 + OR test2.key between 100 and 102) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 RIGHT OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102 + OR test2.key between 100 and 102) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_0:test1 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_0:test1 + TableScan + alias: test1 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + keys: + 0 + 1 + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: test2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Right Outer Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)} + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + notVectorizedReason: MAPJOIN operator: Non-equi joins not supported + vectorized: false + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch 
Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: SELECT * +FROM test1 RIGHT OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102 + OR test2.key between 100 and 102) +PREHOOK: type: QUERY +PREHOOK: Input: default@test1 +PREHOOK: Input: default@test2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * +FROM test1 RIGHT OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102 + OR test2.key between 100 and 102) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test1 +POSTHOOK: Input: default@test2 +#### A masked pattern was here #### +test1.key test1.value test1.col_1 test2.key test2.value test2.col_2 +100 1 Bob 102 2 Del +100 1 Bob 103 2 Ema +100 1 Bob 104 3 Fli +100 1 Bob 105 NULL None +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +101 2 Car 104 3 Fli +101 2 Car 105 NULL None +98 NULL None 102 2 Del +99 0 Alice 102 2 Del +99 2 Mat 102 2 Del +99 2 Mat 103 2 Ema +NULL NULL None 102 2 Del +Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 RIGHT OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 RIGHT OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_0:test1 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_0:test1 + TableScan + alias: test1 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + keys: + 0 + 1 + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: test2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Right Outer Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)} + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + notVectorizedReason: MAPJOIN operator: Non-equi joins not supported + 
vectorized: false + Local Work: + Map Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: SELECT * +FROM test1 RIGHT OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102) +PREHOOK: type: QUERY +PREHOOK: Input: default@test1 +PREHOOK: Input: default@test2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * +FROM test1 RIGHT OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test1 +POSTHOOK: Input: default@test2 +#### A masked pattern was here #### +test1.key test1.value test1.col_1 test2.key test2.value test2.col_2 +100 1 Bob 102 2 Del +100 1 Bob 103 2 Ema +100 1 Bob 104 3 Fli +100 1 Bob 105 NULL None +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +101 2 Car 104 3 Fli +101 2 Car 105 NULL None +99 2 Mat 102 2 Del +99 2 Mat 103 2 Ema +Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 RIGHT OUTER JOIN test2 +ON (test1.value=test2.value + OR test2.key between 100 and 102) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 RIGHT OUTER JOIN test2 +ON (test1.value=test2.value + OR test2.key between 100 and 102) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_0:test1 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_0:test1 + TableScan + alias: test1 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + keys: + 0 + 1 + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: test2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Right Outer Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)} + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + notVectorizedReason: MAPJOIN operator: Non-equi joins not supported + vectorized: false + Local Work: + Map Reduce Local Work + + 
Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross product +PREHOOK: query: SELECT * +FROM test1 RIGHT OUTER JOIN test2 +ON (test1.value=test2.value + OR test2.key between 100 and 102) +PREHOOK: type: QUERY +PREHOOK: Input: default@test1 +PREHOOK: Input: default@test2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * +FROM test1 RIGHT OUTER JOIN test2 +ON (test1.value=test2.value + OR test2.key between 100 and 102) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test1 +POSTHOOK: Input: default@test2 +#### A masked pattern was here #### +test1.key test1.value test1.col_1 test2.key test2.value test2.col_2 +100 1 Bob 102 2 Del +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +98 NULL None 102 2 Del +99 0 Alice 102 2 Del +99 2 Mat 102 2 Del +99 2 Mat 103 2 Ema +NULL NULL NULL 104 3 Fli +NULL NULL NULL 105 NULL None +NULL NULL None 102 2 Del +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 RIGHT OUTER JOIN test2 +ON (test1.value=test2.value + AND (test1.key between 100 and 102 + OR test2.key between 100 and 102)) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 RIGHT OUTER JOIN test2 +ON (test1.value=test2.value + AND (test1.key between 100 and 102 + OR test2.key between 100 and 102)) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-3 depends on stages: Stage-4 + Stage-0 depends on stages: Stage-3 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce Local Work + Alias -> Map Local Tables: + $hdt$_0:test1 + Fetch Operator + limit: -1 + Alias -> Map Local Operator Tree: + $hdt$_0:test1 + TableScan + alias: test1 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + HashTable Sink Operator + keys: + 0 _col1 (type: int) + 1 _col1 (type: int) + + Stage: Stage-3 + Map Reduce + Map Operator Tree: + TableScan + alias: test2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Map Join Operator + condition map: + Right Outer Join 0 to 1 + keys: + 0 _col1 (type: int) + 1 _col1 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)} + Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Map Vectorization: + enabled: true + enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true + inputFileFormats: org.apache.hadoop.mapred.TextInputFormat + notVectorizedReason: MAPJOIN operator: Non-equi joins not supported + vectorized: false + Local Work: + Map 
Reduce Local Work + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT * +FROM test1 RIGHT OUTER JOIN test2 +ON (test1.value=test2.value + AND (test1.key between 100 and 102 + OR test2.key between 100 and 102)) +PREHOOK: type: QUERY +PREHOOK: Input: default@test1 +PREHOOK: Input: default@test2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * +FROM test1 RIGHT OUTER JOIN test2 +ON (test1.value=test2.value + AND (test1.key between 100 and 102 + OR test2.key between 100 and 102)) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test1 +POSTHOOK: Input: default@test2 +#### A masked pattern was here #### +test1.key test1.value test1.col_1 test2.key test2.value test2.col_2 +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +99 2 Mat 102 2 Del +NULL NULL NULL 104 3 Fli +NULL NULL NULL 105 NULL None +Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 FULL OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102 + OR test2.key between 100 and 102) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 FULL OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102 + OR test2.key between 100 and 102) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: test1 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string) + TableScan + alias: test2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string) + Map Vectorization: + enabled: false + enabledConditionsNotMet: Vectorized map work only works with 1 TableScanOperator IS false + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + residual filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)} + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + table: + input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: SELECT * +FROM test1 FULL OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102 + OR test2.key between 100 and 102) +PREHOOK: type: QUERY +PREHOOK: Input: default@test1 +PREHOOK: Input: default@test2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * +FROM test1 FULL OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102 + OR test2.key between 100 and 102) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test1 +POSTHOOK: Input: default@test2 +#### A masked pattern was here #### +test1.key test1.value test1.col_1 test2.key test2.value test2.col_2 +100 1 Bob 102 2 Del +100 1 Bob 103 2 Ema +100 1 Bob 104 3 Fli +100 1 Bob 105 NULL None +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +101 2 Car 104 3 Fli +101 2 Car 105 NULL None +98 NULL None 102 2 Del +99 0 Alice 102 2 Del +99 2 Mat 102 2 Del +99 2 Mat 103 2 Ema +NULL NULL None 102 2 Del +Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 FULL OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 FULL OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: test1 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string) + TableScan + alias: test2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string) + Map Vectorization: + enabled: false + enabledConditionsNotMet: Vectorized map work only works with 1 TableScanOperator IS false + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + residual 
filter predicates: {((_col1 = _col4) or _col0 BETWEEN 100 AND 102)} + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: SELECT * +FROM test1 FULL OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102) +PREHOOK: type: QUERY +PREHOOK: Input: default@test1 +PREHOOK: Input: default@test2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * +FROM test1 FULL OUTER JOIN test2 +ON (test1.value=test2.value + OR test1.key between 100 and 102) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test1 +POSTHOOK: Input: default@test2 +#### A masked pattern was here #### +test1.key test1.value test1.col_1 test2.key test2.value test2.col_2 +100 1 Bob 102 2 Del +100 1 Bob 103 2 Ema +100 1 Bob 104 3 Fli +100 1 Bob 105 NULL None +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +101 2 Car 104 3 Fli +101 2 Car 105 NULL None +98 NULL None NULL NULL NULL +99 0 Alice NULL NULL NULL +99 2 Mat 102 2 Del +99 2 Mat 103 2 Ema +NULL NULL None NULL NULL NULL +Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 FULL OUTER JOIN test2 +ON (test1.value=test2.value + OR test2.key between 100 and 102) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 FULL OUTER JOIN test2 +ON (test1.value=test2.value + OR test2.key between 100 and 102) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: test1 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string) + TableScan + alias: test2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col1 (type: int), _col2 (type: string) + Map Vectorization: + enabled: false + enabledConditionsNotMet: Vectorized map work only works with 1 TableScanOperator IS false + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + 
enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 + 1 + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + residual filter predicates: {((_col1 = _col4) or _col3 BETWEEN 100 AND 102)} + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 24 Data size: 476 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product +PREHOOK: query: SELECT * +FROM test1 FULL OUTER JOIN test2 +ON (test1.value=test2.value + OR test2.key between 100 and 102) +PREHOOK: type: QUERY +PREHOOK: Input: default@test1 +PREHOOK: Input: default@test2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * +FROM test1 FULL OUTER JOIN test2 +ON (test1.value=test2.value + OR test2.key between 100 and 102) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test1 +POSTHOOK: Input: default@test2 +#### A masked pattern was here #### +test1.key test1.value test1.col_1 test2.key test2.value test2.col_2 +100 1 Bob 102 2 Del +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +98 NULL None 102 2 Del +99 0 Alice 102 2 Del +99 2 Mat 102 2 Del +99 2 Mat 103 2 Ema +NULL NULL NULL 104 3 Fli +NULL NULL NULL 105 NULL None +NULL NULL None 102 2 Del +PREHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 FULL OUTER JOIN test2 +ON (test1.value=test2.value + AND (test1.key between 100 and 102 + OR test2.key between 100 and 102)) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN VECTORIZATION OPERATOR +SELECT * +FROM test1 FULL OUTER JOIN test2 +ON (test1.value=test2.value + AND (test1.key between 100 and 102 + OR test2.key between 100 and 102)) +POSTHOOK: type: QUERY +Explain +PLAN VECTORIZATION: + enabled: true + enabledConditionsMet: [hive.vectorized.execution.enabled IS true] + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: test1 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_1 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Statistics: Num rows: 6 Data size: 56 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col2 (type: string) + TableScan + alias: test2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: int), value (type: int), col_2 (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col1 (type: int) + sort order: + + Map-reduce partition columns: _col1 (type: int) + Statistics: Num rows: 4 Data size: 38 Basic stats: COMPLETE Column stats: NONE + value expressions: 
_col0 (type: int), _col2 (type: string) + Map Vectorization: + enabled: false + enabledConditionsNotMet: Vectorized map work only works with 1 TableScanOperator IS false + Reduce Vectorization: + enabled: false + enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true + enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false + Reduce Operator Tree: + Join Operator + condition map: + Full Outer Join 0 to 1 + keys: + 0 _col1 (type: int) + 1 _col1 (type: int) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + residual filter predicates: {(_col0 BETWEEN 100 AND 102 or _col3 BETWEEN 100 AND 102)} + Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 6 Data size: 61 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: SELECT * +FROM test1 FULL OUTER JOIN test2 +ON (test1.value=test2.value + AND (test1.key between 100 and 102 + OR test2.key between 100 and 102)) +PREHOOK: type: QUERY +PREHOOK: Input: default@test1 +PREHOOK: Input: default@test2 +#### A masked pattern was here #### +POSTHOOK: query: SELECT * +FROM test1 FULL OUTER JOIN test2 +ON (test1.value=test2.value + AND (test1.key between 100 and 102 + OR test2.key between 100 and 102)) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@test1 +POSTHOOK: Input: default@test2 +#### A masked pattern was here #### +test1.key test1.value test1.col_1 test2.key test2.value test2.col_2 +100 1 Bob NULL NULL NULL +101 2 Car 102 2 Del +101 2 Car 103 2 Ema +98 NULL None NULL NULL NULL +99 0 Alice NULL NULL NULL +99 2 Mat 102 2 Del +NULL NULL NULL 104 3 Fli +NULL NULL NULL 105 NULL None +NULL NULL None NULL NULL NULL diff --git ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out index 4da63b18c4..fc7ad07afe 100644 --- ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out +++ ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out @@ -253,7 +253,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_string - Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -264,7 +264,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [5, 6, 7, 8, 9, 10, 11, 12, 0, 1, 3, 13, 14, 15, 16, 17] selectExpressions: VectorUDFUnixTimeStampTimestamp(col 1:timestamp) -> 5:bigint, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 6:int, VectorUDFMonthTimestamp(col 1:timestamp, field MONTH) -> 7:int, VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 8:int, VectorUDFWeekOfYearTimestamp(col 1:timestamp, field WEEK_OF_YEAR) -> 9:int, VectorUDFHourTimestamp(col 1:timestamp, field HOUR_OF_DAY) -> 10:int, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 11:int, VectorUDFSecondTimestamp(col 1:timestamp, field SECOND) -> 12:int, IfExprTimestampColumnScalar(col 0:boolean, col 1:timestamp, val 1319-01-25 08:31:57.778) -> 13:timestamp, IfExprTimestampScalarColumn(col 0:boolean, val 2000-12-18 00:42:30.0005, col 1:timestamp) -> 14:timestamp, 
IfExprTimestampColumnColumn(col 0:boolean, col 1:timestampcol 3:timestamp) -> 15:timestamp, IfExprColumnNull(col 0:boolean, col 1:timestamp, null)(children: col 0:boolean, col 1:timestamp) -> 16:timestamp, IfExprNullColumn(col 0:boolean, null, col 3)(children: col 0:boolean, col 3:timestamp) -> 17:timestamp - Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: bigint) sort order: + @@ -273,7 +273,7 @@ STAGE PLANS: native: false nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false - Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int), _col9 (type: boolean), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp), _col13 (type: timestamp), _col14 (type: timestamp), _col15 (type: timestamp), _col16 (type: timestamp) Execution mode: vectorized Map Vectorization: @@ -293,10 +293,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: int), VALUE._col1 (type: int), VALUE._col2 (type: int), VALUE._col2 (type: int), VALUE._col3 (type: int), VALUE._col4 (type: int), VALUE._col5 (type: int), VALUE._col6 (type: int), VALUE._col7 (type: boolean), VALUE._col8 (type: timestamp), VALUE._col9 (type: timestamp), VALUE._col10 (type: timestamp), VALUE._col11 (type: timestamp), VALUE._col12 (type: timestamp), VALUE._col13 (type: timestamp), VALUE._col14 (type: timestamp) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16 - Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -354,6 +354,46 @@ ORDER BY c1 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc_string #### A masked pattern was here #### +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.628 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:43.628 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.637 1969-12-31 15:59:58.174 1319-02-02 16:31:57.778 1969-12-31 23:59:43.637 1969-12-31 15:59:58.174 NULL 1969-12-31 15:59:58.174 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.64 1969-12-31 15:59:58.174 1319-02-02 16:31:57.778 1969-12-31 23:59:43.64 1969-12-31 15:59:58.174 NULL 1969-12-31 15:59:58.174 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.661 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:43.661 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-16 1969 12 31 31 1 23 59 43 
NULL 1969-12-31 23:59:43.676 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:43.676 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.705 1969-12-31 15:59:58.456 1319-02-02 16:31:57.778 1969-12-31 23:59:43.705 1969-12-31 15:59:58.456 NULL 1969-12-31 15:59:58.456 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.709 1969-12-31 15:59:58.456 1319-02-02 16:31:57.778 1969-12-31 23:59:43.709 1969-12-31 15:59:58.456 NULL 1969-12-31 15:59:58.456 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.72 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:43.72 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.721 1969-12-31 15:59:58.456 1319-02-02 16:31:57.778 1969-12-31 23:59:43.721 1969-12-31 15:59:58.456 NULL 1969-12-31 15:59:58.456 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.749 1969-12-31 15:59:58.174 1319-02-02 16:31:57.778 1969-12-31 23:59:43.749 1969-12-31 15:59:58.174 NULL 1969-12-31 15:59:58.174 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.771 1969-12-31 15:59:58.456 1319-02-02 16:31:57.778 1969-12-31 23:59:43.771 1969-12-31 15:59:58.456 NULL 1969-12-31 15:59:58.456 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.773 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:43.773 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.782 1969-12-31 15:59:58.174 1319-02-02 16:31:57.778 1969-12-31 23:59:43.782 1969-12-31 15:59:58.174 NULL 1969-12-31 15:59:58.174 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.783 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:43.783 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.807 1969-12-31 15:59:58.174 1319-02-02 16:31:57.778 1969-12-31 23:59:43.807 1969-12-31 15:59:58.174 NULL 1969-12-31 15:59:58.174 +-16 1969 12 31 31 1 23 59 43 NULL 1969-12-31 23:59:43.82 1969-12-31 15:59:58.174 1319-02-02 16:31:57.778 1969-12-31 23:59:43.82 1969-12-31 15:59:58.174 NULL 1969-12-31 15:59:58.174 +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.619 1969-12-31 16:00:14.793 1969-12-31 23:59:43.619 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.619 1969-12-31 23:59:43.619 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.627 1969-12-31 16:00:03.679 1969-12-31 23:59:43.627 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.627 1969-12-31 23:59:43.627 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.631 1969-12-31 16:00:06.612 1969-12-31 23:59:43.631 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.631 1969-12-31 23:59:43.631 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.642 1969-12-31 16:00:04.424 1969-12-31 23:59:43.642 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.642 1969-12-31 23:59:43.642 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.643 1969-12-31 16:00:11.764 1969-12-31 23:59:43.643 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.643 1969-12-31 23:59:43.643 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.645 1969-12-31 16:00:00.077 1969-12-31 23:59:43.645 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.645 1969-12-31 23:59:43.645 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.661 1969-12-31 15:59:58.732 1969-12-31 23:59:43.661 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.661 1969-12-31 23:59:43.661 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.689 1969-12-31 15:59:46.848 
1969-12-31 23:59:43.689 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.689 1969-12-31 23:59:43.689 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.695 1969-12-31 16:00:06.867 1969-12-31 23:59:43.695 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.695 1969-12-31 23:59:43.695 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.707 1969-12-31 15:59:56.965 1969-12-31 23:59:43.707 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.707 1969-12-31 23:59:43.707 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.71 1969-12-31 16:00:00.687 1969-12-31 23:59:43.71 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.71 1969-12-31 23:59:43.71 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.723 1969-12-31 16:00:03.375 1969-12-31 23:59:43.723 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.723 1969-12-31 23:59:43.723 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.745 1969-12-31 16:00:04.052 1969-12-31 23:59:43.745 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.745 1969-12-31 23:59:43.745 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.764 1969-12-31 16:00:10.52 1969-12-31 23:59:43.764 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.764 1969-12-31 23:59:43.764 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.775 1969-12-31 15:59:48.003 1969-12-31 23:59:43.775 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.775 1969-12-31 23:59:43.775 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.779 1969-12-31 15:59:53.274 1969-12-31 23:59:43.779 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.779 1969-12-31 23:59:43.779 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.785 1969-12-31 16:00:14.096 1969-12-31 23:59:43.785 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.785 1969-12-31 23:59:43.785 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.792 1969-12-31 15:59:52.041 1969-12-31 23:59:43.792 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.792 1969-12-31 23:59:43.792 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.793 1969-12-31 15:59:56.316 1969-12-31 23:59:43.793 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.793 1969-12-31 23:59:43.793 NULL +-16 1969 12 31 31 1 23 59 43 true 1969-12-31 23:59:43.811 1969-12-31 16:00:00.479 1969-12-31 23:59:43.811 2000-12-18 08:42:30.0005 1969-12-31 23:59:43.811 1969-12-31 23:59:43.811 NULL +-29 1969 12 31 31 1 23 59 30 NULL 1969-12-31 23:59:30.929 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:30.929 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-29 1969 12 31 31 1 23 59 30 NULL 1969-12-31 23:59:30.929 1969-12-31 15:59:55.451 1319-02-02 16:31:57.778 1969-12-31 23:59:30.929 1969-12-31 15:59:55.451 NULL 1969-12-31 15:59:55.451 +-29 1969 12 31 31 1 23 59 30 NULL 1969-12-31 23:59:30.929 1969-12-31 15:59:58.174 1319-02-02 16:31:57.778 1969-12-31 23:59:30.929 1969-12-31 15:59:58.174 NULL 1969-12-31 15:59:58.174 +-29 1969 12 31 31 1 23 59 30 NULL 1969-12-31 23:59:30.929 1969-12-31 15:59:58.456 1319-02-02 16:31:57.778 1969-12-31 23:59:30.929 1969-12-31 15:59:58.456 NULL 1969-12-31 15:59:58.456 -45479202281 528 10 25 25 43 8 15 18 true 0528-10-27 08:15:18.941718273 NULL 0528-10-27 08:15:18.941718273 2000-12-18 08:42:30.0005 0528-10-27 08:15:18.941718273 0528-10-27 08:15:18.941718273 NULL 1632453512 2021 9 24 24 38 3 18 32 NULL 2021-09-24 03:18:32.4 1974-10-04 17:21:03.989 1319-02-02 16:31:57.778 2021-09-24 03:18:32.4 1974-10-04 17:21:03.989 NULL 1974-10-04 17:21:03.989 1632453512 2021 9 24 24 38 3 18 32 false 2021-09-24 03:18:32.4 1999-10-03 16:59:10.396903939 1319-02-02 16:31:57.778 
2021-09-24 03:18:32.4 1999-10-03 16:59:10.396903939 NULL 1999-10-03 16:59:10.396903939 @@ -363,47 +403,7 @@ POSTHOOK: Input: default@alltypesorc_string 163809583224 7160 12 2 2 48 6 0 24 NULL 7160-12-02 06:00:24.81200852 NULL 1319-02-02 16:31:57.778 7160-12-02 06:00:24.81200852 NULL NULL NULL 490699811 1985 7 20 20 29 9 30 11 true 1985-07-20 09:30:11 1319-02-02 16:31:57.778 1985-07-20 09:30:11 2000-12-18 08:42:30.0005 1985-07-20 09:30:11 1985-07-20 09:30:11 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL 1319-02-02 16:31:57.778 NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 15:59:44.028 1319-02-02 16:31:57.778 NULL 1969-12-31 15:59:44.028 NULL 1969-12-31 15:59:44.028 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 15:59:44.809 1319-02-02 16:31:57.778 NULL 1969-12-31 15:59:44.809 NULL 1969-12-31 15:59:44.809 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 15:59:45.949 1319-02-02 16:31:57.778 NULL 1969-12-31 15:59:45.949 NULL 1969-12-31 15:59:45.949 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 15:59:50.531 1319-02-02 16:31:57.778 NULL 1969-12-31 15:59:50.531 NULL 1969-12-31 15:59:50.531 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 15:59:51.009 1319-02-02 16:31:57.778 NULL 1969-12-31 15:59:51.009 NULL 1969-12-31 15:59:51.009 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 15:59:53.761 1319-02-02 16:31:57.778 NULL 1969-12-31 15:59:53.761 NULL 1969-12-31 15:59:53.761 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:00.905 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:00.905 NULL 1969-12-31 16:00:00.905 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:03.586 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:03.586 NULL 1969-12-31 16:00:03.586 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:05.227 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:05.227 NULL 1969-12-31 16:00:05.227 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:05.535 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:05.535 NULL 1969-12-31 16:00:05.535 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:07.02 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:07.02 NULL 1969-12-31 16:00:07.02 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:07.365 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:07.365 NULL 1969-12-31 16:00:07.365 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:07.517 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:07.517 NULL 1969-12-31 16:00:07.517 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:07.767 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:07.767 NULL 1969-12-31 16:00:07.767 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:08.602 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:08.602 NULL 1969-12-31 16:00:08.602 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:09.938 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:09.938 NULL 1969-12-31 16:00:09.938 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:14.214 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:14.214 NULL 1969-12-31 16:00:14.214 -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL 1969-12-31 16:00:14.783 1319-02-02 16:31:57.778 NULL 1969-12-31 16:00:14.783 NULL 1969-12-31 16:00:14.783 NULL NULL NULL NULL NULL NULL NULL NULL NULL false 
NULL NULL 1319-02-02 16:31:57.778 NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL false NULL NULL 1319-02-02 16:31:57.778 NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:43.773 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:44.262 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:44.568 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:45.697 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:47.351 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:47.446 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:48.023 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:48.629 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:49.177 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:49.208 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:50.789 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:51.245 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:52.372 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 15:59:55.249 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:00.661 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:00.784 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:09.313 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:09.538 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:09.986 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:11.031 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 1969-12-31 16:00:11.465 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL 2024-11-11 16:42:41.101 NULL 2000-12-18 08:42:30.0005 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL true NULL NULL NULL 2000-12-18 08:42:30.0005 NULL NULL NULL PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT @@ -446,7 +446,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_string - Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -457,7 +457,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [5, 7, 8, 9, 6, 11, 10, 13, 14] selectExpressions: VectorUDFUnixTimeStampString(col 2:string) -> 
5:bigint, VectorUDFYearDate(col 6, field YEAR)(children: CastStringToDate(col 2:string) -> 6:date) -> 7:int, VectorUDFMonthDate(col 6, field MONTH)(children: CastStringToDate(col 2:string) -> 6:date) -> 8:int, VectorUDFDayOfMonthDate(col 6, field DAY_OF_MONTH)(children: CastStringToDate(col 2:string) -> 6:date) -> 9:int, VectorUDFDayOfMonthString(col 2:string, fieldStart 8, fieldLength 2) -> 6:int, VectorUDFWeekOfYearDate(col 10, field WEEK_OF_YEAR)(children: CastStringToDate(col 2:string) -> 10:date) -> 11:int, VectorUDFHourTimestamp(col 12:timestamp, field HOUR_OF_DAY)(children: CastStringToTimestamp(col 2:string) -> 12:timestamp) -> 10:int, VectorUDFMinuteTimestamp(col 12:timestamp, field MINUTE)(children: CastStringToTimestamp(col 2:string) -> 12:timestamp) -> 13:int, VectorUDFSecondTimestamp(col 12:timestamp, field SECOND)(children: CastStringToTimestamp(col 2:string) -> 12:timestamp) -> 14:int - Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: bigint) sort order: + @@ -466,7 +466,7 @@ STAGE PLANS: native: false nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false - Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: int), _col5 (type: int), _col6 (type: int), _col7 (type: int), _col8 (type: int) Execution mode: vectorized Map Vectorization: @@ -486,10 +486,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: bigint), VALUE._col0 (type: int), VALUE._col1 (type: int), VALUE._col2 (type: int), VALUE._col3 (type: int), VALUE._col4 (type: int), VALUE._col5 (type: int), VALUE._col6 (type: int), VALUE._col7 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -531,7 +531,47 @@ ORDER BY c1 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc_string #### A masked pattern was here #### +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 
+-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 +-17 1969 12 31 31 1 23 59 43 -2736272726 1883 4 17 17 16 4 14 34 +-30 1969 12 31 31 1 23 59 30 +-30 1969 12 31 31 1 23 59 30 +-30 1969 12 31 31 1 23 59 30 +-30 1969 12 31 31 1 23 59 30 -62018199211 4 9 24 22 39 18 26 29 1365554626 2013 4 10 10 15 0 43 46 206730996125 8521 1 16 16 3 20 42 5 @@ -543,46 +583,6 @@ NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT to_unix_timestamp(ctimestamp1) = to_unix_timestamp(stimestamp1) AS c1, year(ctimestamp1) = year(stimestamp1), @@ -623,7 +623,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_string - Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -634,7 +634,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [7, 6, 8, 9, 11, 10, 14, 15, 16] selectExpressions: LongColEqualLongColumn(col 5:bigint, col 6:bigint)(children: VectorUDFUnixTimeStampTimestamp(col 1:timestamp) -> 5:bigint, VectorUDFUnixTimeStampString(col 2:string) -> 6:bigint) -> 7:boolean, LongColEqualLongColumn(col 5:int, col 
8:int)(children: VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 5:int, VectorUDFYearDate(col 6, field YEAR)(children: CastStringToDate(col 2:string) -> 6:date) -> 8:int) -> 6:boolean, LongColEqualLongColumn(col 5:int, col 9:int)(children: VectorUDFMonthTimestamp(col 1:timestamp, field MONTH) -> 5:int, VectorUDFMonthDate(col 8, field MONTH)(children: CastStringToDate(col 2:string) -> 8:date) -> 9:int) -> 8:boolean, LongColEqualLongColumn(col 5:int, col 10:int)(children: VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 5:int, VectorUDFDayOfMonthDate(col 9, field DAY_OF_MONTH)(children: CastStringToDate(col 2:string) -> 9:date) -> 10:int) -> 9:boolean, LongColEqualLongColumn(col 5:int, col 10:int)(children: VectorUDFDayOfMonthTimestamp(col 1:timestamp, field DAY_OF_MONTH) -> 5:int, VectorUDFDayOfMonthString(col 2:string, fieldStart 8, fieldLength 2) -> 10:int) -> 11:boolean, LongColEqualLongColumn(col 5:int, col 12:int)(children: VectorUDFWeekOfYearTimestamp(col 1:timestamp, field WEEK_OF_YEAR) -> 5:int, VectorUDFWeekOfYearDate(col 10, field WEEK_OF_YEAR)(children: CastStringToDate(col 2:string) -> 10:date) -> 12:int) -> 10:boolean, LongColEqualLongColumn(col 5:int, col 12:int)(children: VectorUDFHourTimestamp(col 1:timestamp, field HOUR_OF_DAY) -> 5:int, VectorUDFHourTimestamp(col 13:timestamp, field HOUR_OF_DAY)(children: CastStringToTimestamp(col 2:string) -> 13:timestamp) -> 12:int) -> 14:boolean, LongColEqualLongColumn(col 5:int, col 12:int)(children: VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 5:int, VectorUDFMinuteTimestamp(col 13:timestamp, field MINUTE)(children: CastStringToTimestamp(col 2:string) -> 13:timestamp) -> 12:int) -> 15:boolean, LongColEqualLongColumn(col 5:int, col 12:int)(children: VectorUDFSecondTimestamp(col 1:timestamp, field SECOND) -> 5:int, VectorUDFSecondTimestamp(col 13:timestamp, field SECOND)(children: CastStringToTimestamp(col 2:string) -> 13:timestamp) -> 12:int) -> 16:boolean - Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: boolean) sort order: + @@ -643,7 +643,7 @@ STAGE PLANS: native: false nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false - Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: boolean), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col6 (type: boolean), _col7 (type: boolean), _col8 (type: boolean) Execution mode: vectorized Map Vectorization: @@ -663,10 +663,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: boolean), VALUE._col0 (type: boolean), VALUE._col1 (type: boolean), VALUE._col2 (type: boolean), VALUE._col3 (type: boolean), VALUE._col4 (type: boolean), VALUE._col5 (type: boolean), VALUE._col6 (type: boolean), VALUE._col7 (type: boolean) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE File 
Output Operator compressed: false - Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -716,50 +716,50 @@ NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL -NULL NULL NULL NULL NULL NULL NULL NULL NULL false false false false false false false false false false false false false false false false false false false false false false false false false false false false false false false false false false false false +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true 
true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true +false true true true true true true true true PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT to_unix_timestamp(stimestamp1) AS c1, year(stimestamp1), @@ -916,7 +916,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_string - Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -926,7 +926,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [1] - Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: min(ctimestamp1), max(ctimestamp1), count(ctimestamp1), count() Group By Vectorization: @@ -1000,7 +1000,7 @@ FROM alltypesorc_string POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc_string #### A masked pattern was here #### -0528-10-27 08:15:18.941718273 7160-12-02 06:00:24.81200852 8 52 +0528-10-27 08:15:18.941718273 7160-12-02 06:00:24.81200852 48 52 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT round(sum(ctimestamp1), 3) FROM alltypesorc_string @@ -1023,7 +1023,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_string - Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -1033,7 +1033,7 @@ STAGE PLANS: className: VectorSelectOperator native: true projectedOutputColumnNums: [1] - Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(ctimestamp1) Group By Vectorization: @@ -1105,7 +1105,7 @@ FROM alltypesorc_string POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc_string #### A masked pattern was here #### -2.89160478029166E11 +2.891604773267E11 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT round(avg(ctimestamp1), 0), variance(ctimestamp1) between 8.97077295279421E19 and 8.97077295279422E19, @@ -1142,7 +1142,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesorc_string - Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE TableScan Vectorization: native: true Select Operator @@ -1153,7 +1153,7 @@ STAGE PLANS: native: true projectedOutputColumnNums: [1, 5, 8] 
selectExpressions: CastTimestampToDouble(col 1:timestamp) -> 5:double, DoubleColMultiplyDoubleColumn(col 6:double, col 7:double)(children: CastTimestampToDouble(col 1:timestamp) -> 6:double, CastTimestampToDouble(col 1:timestamp) -> 7:double) -> 8:double - Statistics: Num rows: 52 Data size: 3179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 52 Data size: 8979 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col0), count(_col0), sum(_col2), sum(_col1) Group By Vectorization: @@ -1239,4 +1239,4 @@ FROM alltypesorc_string POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc_string #### A masked pattern was here #### -3.6145059754E10 false false false 7.5245178084814E10 7.5245178084814E10 7.5245178084814E10 8.0440478971476E10 +6.024176611E9 false false false 3.3542405863247E10 3.3542405863247E10 3.3542405863247E10 3.3897361841912E10 diff --git ql/src/test/results/clientpositive/windowing_gby2.q.out ql/src/test/results/clientpositive/windowing_gby2.q.out index 670c701837..f5018d9369 100644 --- ql/src/test/results/clientpositive/windowing_gby2.q.out +++ ql/src/test/results/clientpositive/windowing_gby2.q.out @@ -79,7 +79,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: 0 raw input shape: window functions: @@ -122,11 +122,11 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@cbo_t3 #### A masked pattern was here #### 1 -2 -2 -2 -5 -5 +1 +1 +4 +4 +6 7 PREHOOK: query: explain select avg(cast(ws.key as int)) over (partition by min(ws.value) order by sum(ws.c_int)) as return_rank @@ -206,7 +206,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col2 ASC NULLS FIRST + order by: _col2 ASC NULLS LAST partition by: _col1 raw input shape: window functions: @@ -614,7 +614,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: (UDFToDouble(_col1) / UDFToDouble(_col2)) ASC NULLS FIRST + order by: (UDFToDouble(_col1) / UDFToDouble(_col2)) ASC NULLS LAST partition by: 0 raw input shape: window functions: diff --git ql/src/test/results/clientpositive/windowing_navfn.q.out ql/src/test/results/clientpositive/windowing_navfn.q.out index 8c73534d70..ff974839a5 100644 --- ql/src/test/results/clientpositive/windowing_navfn.q.out +++ ql/src/test/results/clientpositive/windowing_navfn.q.out @@ -731,11 +731,11 @@ POSTHOOK: Input: default@wtest_n0 1 NULL NULL NULL NULL NULL 1 NULL NULL NULL NULL NULL 1 NULL NULL NULL NULL NULL -2 NULL NULL NULL NULL NULL -2 NULL NULL 1 NULL 1 -2 1 NULL 1 NULL 1 -2 2 1 1 NULL 1 -2 3 2 2 NULL 1 +2 1 1 1 1 1 +2 2 1 1 1 1 +2 3 2 2 1 1 +2 NULL 3 3 1 1 +2 NULL NULL NULL 1 1 3 1 1 1 1 1 3 2 1 1 1 1 3 3 2 2 1 1 @@ -797,11 +797,11 @@ POSTHOOK: Input: default@wtest_n0 1 NULL NULL NULL NULL NULL 1 NULL NULL NULL NULL NULL 1 NULL NULL NULL NULL NULL -2 NULL NULL NULL NULL NULL -2 NULL 1 1 1 1 2 1 2 2 2 2 2 2 3 3 3 3 -2 3 3 3 3 3 +2 3 NULL 3 NULL 3 +2 NULL NULL 3 NULL 3 +2 NULL NULL NULL NULL 3 3 1 2 2 2 2 3 2 3 3 3 3 3 3 4 4 4 4 diff --git ql/src/test/results/clientpositive/windowing_order_null.q.out ql/src/test/results/clientpositive/windowing_order_null.q.out index 2bda769a4b..c34330b68c 100644 --- ql/src/test/results/clientpositive/windowing_order_null.q.out +++ ql/src/test/results/clientpositive/windowing_order_null.q.out @@ -80,13 +80,13 @@ POSTHOOK: Input: default@over10k #### A masked pattern was here #### NULL 
alice ichabod NULL NULL NULL calvin miller NULL NULL -0.01 NULL NULL NULL -0.01 NULL NULL NULL 0.01 calvin miller 8.39 8.390000343322754 -0.02 NULL NULL NULL +0.01 NULL NULL 8.390000343322754 +0.01 NULL NULL 8.390000343322754 0.02 holly polk 5.29 5.289999961853027 0.02 wendy quirinius 25.5 30.789999961853027 0.02 yuri laertes 37.59 68.38000011444092 +0.02 NULL NULL 68.38000011444092 0.03 nick steinbeck 79.24 79.23999786376953 PREHOOK: query: select ts, s, f, sum(f) over (partition by ts order by f asc nulls first range between current row and unbounded following) from over10k limit 10 PREHOOK: type: QUERY diff --git ql/src/test/results/clientpositive/windowing_streaming.q.out ql/src/test/results/clientpositive/windowing_streaming.q.out index 42609480bc..0f2ac4a09d 100644 --- ql/src/test/results/clientpositive/windowing_streaming.q.out +++ ql/src/test/results/clientpositive/windowing_streaming.q.out @@ -83,7 +83,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -155,7 +155,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col1 ASC NULLS FIRST + order by: _col1 ASC NULLS LAST partition by: _col2 raw input shape: window functions: @@ -355,7 +355,7 @@ STAGE PLANS: Windowing table definition input alias: ptf_1 name: windowingtablefunction - order by: _col5 ASC NULLS FIRST + order by: _col5 ASC NULLS LAST partition by: _col0 raw input shape: window functions: @@ -417,48 +417,10 @@ where ctinyint is null POSTHOOK: type: QUERY POSTHOOK: Input: default@sb_n0 #### A masked pattern was here #### -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 +NULL -16379.0 1 +NULL -16310.0 2 +NULL -16309.0 3 +NULL -16307.0 4 PREHOOK: query: drop table if exists sD_n0 PREHOOK: type: DROPTABLE POSTHOOK: query: drop table if exists sD_n0 @@ -488,45 +450,7 @@ where ctinyint is null POSTHOOK: type: QUERY POSTHOOK: Input: default@sd_n0 #### A masked pattern was here #### -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 -NULL NULL 1 +NULL -16379.0 1 +NULL -16310.0 2 +NULL -16309.0 3 +NULL -16307.0 4 diff --git ql/src/test/results/clientpositive/windowing_windowspec3.q.out ql/src/test/results/clientpositive/windowing_windowspec3.q.out index 7dbb275f3c..440c1e5c0b 100644 --- ql/src/test/results/clientpositive/windowing_windowspec3.q.out +++ ql/src/test/results/clientpositive/windowing_windowspec3.q.out @@ -86,11 +86,11 @@ from emp_n0 
POSTHOOK: type: QUERY POSTHOOK: Input: default@emp_n0 #### A masked pattern was here #### -10 7988 NULL 1500.0 3000.0 3000.0 NULL NULL 8750.0 3000.0 -10 7987 NULL 1500.0 3000.0 3000.0 NULL NULL 8750.0 3000.0 -10 7782 1981-06-09 2450.0 2450.0 2450.0 NULL NULL 6300.0 5450.0 -10 7839 1981-11-17 5000.0 5000.0 6300.0 NULL 1300.0 1300.0 10450.0 -10 7934 1982-01-23 1300.0 6300.0 6300.0 5000.0 NULL NULL 11750.0 +10 7782 1981-06-09 2450.0 2450.0 2450.0 NULL NULL 9300.0 2450.0 +10 7839 1981-11-17 5000.0 5000.0 6300.0 NULL 1300.0 4300.0 7450.0 +10 7934 1982-01-23 1300.0 6300.0 6300.0 5000.0 NULL 3000.0 8750.0 +10 7988 NULL 1500.0 3000.0 3000.0 3000.0 NULL NULL 11750.0 +10 7987 NULL 1500.0 3000.0 3000.0 3000.0 NULL NULL 11750.0 20 7369 1980-12-17 800.0 800.0 800.0 NULL NULL 10075.0 800.0 20 7566 1981-04-02 2975.0 2975.0 2975.0 NULL NULL 7100.0 3775.0 20 7902 1981-12-03 3000.0 3000.0 3000.0 NULL NULL 4100.0 6775.0 @@ -124,11 +124,11 @@ from emp_n0 POSTHOOK: type: QUERY POSTHOOK: Input: default@emp_n0 #### A masked pattern was here #### -10 7988 NULL 1500.0 3000.0 3000.0 NULL NULL 8750.0 3000.0 -10 7987 NULL 1500.0 3000.0 3000.0 NULL NULL 8750.0 3000.0 -10 7782 1981-06-09 00:00:00 2450.0 2450.0 2450.0 NULL NULL 6300.0 5450.0 -10 7839 1981-11-17 00:00:00 5000.0 5000.0 6300.0 NULL 1300.0 1300.0 10450.0 -10 7934 1982-01-23 00:00:00 1300.0 6300.0 6300.0 5000.0 NULL NULL 11750.0 +10 7782 1981-06-09 00:00:00 2450.0 2450.0 2450.0 NULL NULL 9300.0 2450.0 +10 7839 1981-11-17 00:00:00 5000.0 5000.0 6300.0 NULL 1300.0 4300.0 7450.0 +10 7934 1982-01-23 00:00:00 1300.0 6300.0 6300.0 5000.0 NULL 3000.0 8750.0 +10 7988 NULL 1500.0 3000.0 3000.0 3000.0 NULL NULL 11750.0 +10 7987 NULL 1500.0 3000.0 3000.0 3000.0 NULL NULL 11750.0 20 7369 1980-12-17 00:00:00 800.0 800.0 800.0 NULL NULL 10075.0 800.0 20 7566 1981-04-02 00:00:00 2975.0 2975.0 2975.0 NULL NULL 7100.0 3775.0 20 7902 1981-12-03 00:00:00 3000.0 3000.0 3000.0 NULL NULL 4100.0 6775.0 @@ -172,12 +172,12 @@ POSTHOOK: Input: default@emp_n0 20 7876 NULL NULL NULL NULL NULL NULL NULL 20 7566 NULL NULL NULL NULL NULL NULL NULL 20 7369 NULL NULL NULL NULL NULL NULL NULL -30 7698 NULL NULL NULL NULL NULL 550.0 NULL -30 7900 NULL NULL NULL NULL NULL 550.0 NULL 30 7844 0.0 0.0 0.0 NULL NULL 733.3333333333334 0.0 30 7499 300.0 300.0 400.0 NULL 500.0 1400.0 266.6666666666667 30 7521 500.0 400.0 400.0 300.0 NULL 1400.0 266.6666666666667 30 7654 1400.0 1400.0 1400.0 NULL NULL NULL 550.0 +30 7698 NULL NULL NULL NULL NULL NULL 550.0 +30 7900 NULL NULL NULL NULL NULL NULL 550.0 PREHOOK: query: select deptno, empno, stock, salary, avg(salary) over (partition by deptno order by stock range 200 preceding), avg(salary) over (partition by deptno order by stock range between 200 preceding and 200 following), @@ -200,19 +200,19 @@ from emp_n0 POSTHOOK: type: QUERY POSTHOOK: Input: default@emp_n0 #### A masked pattern was here #### -10 7839 NULL 5000.0 5000.0 5000.0 NULL NULL 1687.5 5000.0 -10 7782 50.00 2450.0 2450.0 1687.5 NULL 1500.0 NULL 2350.0 -10 7934 100.00 1300.0 1875.0 1687.5 NULL NULL NULL 2350.0 -10 7987 150.50 1500.0 1750.0 1687.5 NULL NULL NULL 2350.0 -10 7988 200.00 1500.0 1687.5 1687.5 2450.0 NULL NULL 2350.0 -20 7788 NULL 3000.0 1975.0 1975.0 NULL NULL 2975.0 1975.0 -20 7902 NULL 3000.0 1975.0 1975.0 NULL NULL 2975.0 1975.0 -20 7876 NULL 1100.0 1975.0 1975.0 NULL NULL 2975.0 1975.0 -20 7369 NULL 800.0 1975.0 1975.0 NULL NULL 2975.0 1975.0 -20 7566 100.00 2975.0 2975.0 2975.0 NULL NULL NULL 2175.0 -30 7900 NULL 950.0 1900.0 1900.0 NULL NULL 1400.0 1900.0 -30 7698 NULL 
2850.0 1900.0 1900.0 NULL NULL 1400.0 1900.0 -30 7499 200.50 1600.0 1600.0 1450.0 NULL NULL 1250.0 1630.0 -30 7844 300.00 1500.0 1550.0 1400.0 NULL 1250.0 NULL 1566.6666666666667 -30 7521 300.50 1250.0 1450.0 1400.0 NULL 1250.0 NULL 1566.6666666666667 -30 7654 500.00 1250.0 1333.3333333333333 1333.3333333333333 1375.0 NULL NULL 1566.6666666666667 +10 7782 50.00 2450.0 2450.0 1687.5 NULL 1500.0 5000.0 1687.5 +10 7934 100.00 1300.0 1875.0 1687.5 NULL NULL 5000.0 1687.5 +10 7987 150.50 1500.0 1750.0 1687.5 NULL NULL 5000.0 1687.5 +10 7988 200.00 1500.0 1687.5 1687.5 2450.0 NULL 5000.0 1687.5 +10 7839 NULL 5000.0 5000.0 5000.0 5000.0 NULL NULL 2350.0 +20 7566 100.00 2975.0 2975.0 2975.0 NULL NULL 1975.0 2975.0 +20 7788 NULL 3000.0 1975.0 1975.0 1975.0 NULL NULL 2175.0 +20 7902 NULL 3000.0 1975.0 1975.0 1975.0 NULL NULL 2175.0 +20 7876 NULL 1100.0 1975.0 1975.0 1975.0 NULL NULL 2175.0 +20 7369 NULL 800.0 1975.0 1975.0 1975.0 NULL NULL 2175.0 +30 7499 200.50 1600.0 1600.0 1450.0 NULL NULL 1683.3333333333333 1450.0 +30 7844 300.00 1500.0 1550.0 1400.0 NULL 1250.0 1900.0 1400.0 +30 7521 300.50 1250.0 1450.0 1400.0 NULL 1250.0 1900.0 1400.0 +30 7654 500.00 1250.0 1333.3333333333333 1333.3333333333333 1375.0 NULL 1900.0 1400.0 +30 7698 NULL 2850.0 1900.0 1900.0 1900.0 NULL NULL 1566.6666666666667 +30 7900 NULL 950.0 1900.0 1900.0 1900.0 NULL NULL 1566.6666666666667 diff --git serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java index c6b77ed8ee..7e10af7177 100644 --- serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java +++ serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java @@ -599,8 +599,6 @@ public static Object deserializeReadComplexType(DeserializeRead deserializeRead, return getComplexField(deserializeRead, typeInfo); } - static int fake = 0; - private static Object getComplexField(DeserializeRead deserializeRead, TypeInfo typeInfo) throws IOException { switch (typeInfo.getCategory()) { diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/common/StatsSetupConst.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/common/StatsSetupConst.java similarity index 100% rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/common/StatsSetupConst.java rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/common/StatsSetupConst.java diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ColumnType.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/ColumnType.java similarity index 95% rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ColumnType.java rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/ColumnType.java index d5dea4dc3c..39d2b2f96c 100644 --- standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ColumnType.java +++ standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/ColumnType.java @@ -22,6 +22,7 @@ import org.apache.hadoop.hive.metastore.utils.StringUtils; import java.util.HashMap; +import java.util.HashSet; import java.util.Map; import java.util.Set; @@ -144,6 +145,14 @@ NumericCastOrder.put(DOUBLE_TYPE_NAME, 7); } + private static final Set decoratedTypeNames = new HashSet<>(); + + static { + decoratedTypeNames.add("char"); + 
decoratedTypeNames.add("decimal"); + decoratedTypeNames.add("varchar"); + } + private static final Map alternateTypeNames = new HashMap<>(); static { @@ -199,6 +208,9 @@ public static String getTypeName(String typeString) { if (typeString == null) return null; String protoType = typeString.toLowerCase().split("\\W")[0]; + if (decoratedTypeNames.contains(protoType)) { + return protoType; + } String realType = alternateTypeNames.get(protoType); return realType == null ? protoType : realType; } @@ -217,8 +229,9 @@ public static boolean areColTypesCompatible(String from, String to) { return NumericCastOrder.get(from) < NumericCastOrder.get(to); } - // Allow string to double conversion - if (StringTypes.contains(from) && to.equals(DOUBLE_TYPE_NAME)) return true; + // Allow string to double/decimal conversion + if (StringTypes.contains(from) && + (to.equals(DOUBLE_TYPE_NAME) || to.equals(DECIMAL_TYPE_NAME))) return true; // Void can go to anything if (from.equals(VOID_TYPE_NAME)) return true; diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/FileMetadataHandler.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/FileMetadataHandler.java similarity index 100% rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/FileMetadataHandler.java rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/FileMetadataHandler.java diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java similarity index 100% rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java similarity index 100% rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreFS.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreFS.java similarity index 100% rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreFS.java rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreFS.java diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetadataStore.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/MetadataStore.java similarity index 100% rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetadataStore.java rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/MetadataStore.java diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetastoreTaskThread.java 
standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/MetastoreTaskThread.java
similarity index 100%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetastoreTaskThread.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/MetastoreTaskThread.java
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PartitionExpressionProxy.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/PartitionExpressionProxy.java
similarity index 100%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PartitionExpressionProxy.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/PartitionExpressionProxy.java
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ReplChangeManager.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/ReplChangeManager.java
similarity index 100%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ReplChangeManager.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/ReplChangeManager.java
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java
similarity index 100%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/TableIterable.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/TableIterable.java
similarity index 100%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/TableIterable.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/TableIterable.java
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java
similarity index 100%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
similarity index 100%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/conf/TimeValidator.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/TimeValidator.java
similarity index 100%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/conf/TimeValidator.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/TimeValidator.java
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/partition/spec/CompositePartitionSpecProxy.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/partition/spec/CompositePartitionSpecProxy.java
similarity index 100%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/partition/spec/CompositePartitionSpecProxy.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/partition/spec/CompositePartitionSpecProxy.java
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionListComposingSpecProxy.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionListComposingSpecProxy.java
similarity index 100%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionListComposingSpecProxy.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionListComposingSpecProxy.java
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionSpecProxy.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionSpecProxy.java
similarity index 100%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionSpecProxy.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionSpecProxy.java
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionSpecWithSharedSDProxy.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionSpecWithSharedSDProxy.java
similarity index 100%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionSpecWithSharedSDProxy.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionSpecWithSharedSDProxy.java
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/DelegationTokenIdentifier.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/security/DelegationTokenIdentifier.java
similarity index 100%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/DelegationTokenIdentifier.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/security/DelegationTokenIdentifier.java
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/DelegationTokenSecretManager.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/security/DelegationTokenSecretManager.java
similarity index 100%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/DelegationTokenSecretManager.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/security/DelegationTokenSecretManager.java
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/DelegationTokenSelector.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/security/DelegationTokenSelector.java
similarity index 100%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/DelegationTokenSelector.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/security/DelegationTokenSelector.java
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/HadoopThriftAuthBridge.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/security/HadoopThriftAuthBridge.java
similarity index 100%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/HadoopThriftAuthBridge.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/security/HadoopThriftAuthBridge.java
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/HadoopThriftAuthBridge23.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/security/HadoopThriftAuthBridge23.java
similarity index 100%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/HadoopThriftAuthBridge23.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/security/HadoopThriftAuthBridge23.java
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/utils/FileUtils.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/FileUtils.java
similarity index 100%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/utils/FileUtils.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/FileUtils.java
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/utils/HdfsUtils.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/HdfsUtils.java
similarity index 100%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/utils/HdfsUtils.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/HdfsUtils.java
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java
similarity index 100%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/MetaStoreUtils.java
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/utils/SecurityUtils.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/SecurityUtils.java
similarity index 100%
rename from standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/utils/SecurityUtils.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/SecurityUtils.java
diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
index 571c789edd..93b57a1f81 100644
--- standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
+++ standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
@@ -26,8 +26,8 @@
 import java.sql.Blob;
 import java.sql.Clob;
 import java.sql.Connection;
+import java.sql.PreparedStatement;
 import java.sql.SQLException;
-import java.sql.Statement;
 import java.text.ParseException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -306,20 +306,20 @@
   public boolean isCompatibleDatastore() {
     return isCompatibleDatastore;
   }
 
-  private void executeNoResult(final String queryText) throws SQLException {
+  private void executeNoResult(final String queryText, Object[] params) throws SQLException {
     JDOConnection jdoConn = pm.getDataStoreConnection();
-    Statement statement = null;
-    boolean doTrace = LOG.isDebugEnabled();
-    try {
+    try (PreparedStatement statement = ((Connection) jdoConn.getNativeConnection()).prepareStatement(queryText)) {
+      boolean doTrace = LOG.isDebugEnabled();
       long start = doTrace ? System.nanoTime() : 0;
-      statement = ((Connection)jdoConn.getNativeConnection()).createStatement();
-      statement.execute(queryText);
+      if (params != null) {
+        for (int i = 0; i < params.length; i++) {
+          statement.setObject(i + 1, params[i]);
+        }
+      }
+      statement.execute();
       timingTrace(doTrace, queryText, start, doTrace ? System.nanoTime() : 0);
     } finally {
-      if(statement != null){
-        statement.close();
-      }
-      jdoConn.close(); // We must release the connection before we call other pm methods.
+      jdoConn.close();
     }
   }
@@ -640,7 +640,8 @@ private boolean isViewTable(String catName, String dbName, String tblName) throw
     int idStringWidth = (int)Math.ceil(Math.log10(partIdList.size())) + 1; // 1 for comma
     int sbCapacity = partIdList.size() * idStringWidth;
 
-    String partIds = getIdListForIn(partIdList);
+    Object[] partIdParams = partIdList.toArray(new Object[partIdList.size()]);
+    String partIdParamsPlaceholder = makeParams(partIdParams.length);
 
     // Get most of the fields for the IDs provided.
     // Assume db and table names are the same for all partition, as provided in arguments.
@@ -653,10 +654,12 @@ private boolean isViewTable(String catName, String dbName, String tblName) throw
         + ".\"WRITE_ID\""
         + " from " + PARTITIONS + ""
         + " left outer join " + SDS + " on " + PARTITIONS + ".\"SD_ID\" = " + SDS + ".\"SD_ID\" "
         + " left outer join " + SERDES + " on " + SDS + ".\"SERDE_ID\" = " + SERDES + ".\"SERDE_ID\" "
-        + "where \"PART_ID\" in (" + partIds + ") order by \"PART_NAME\" asc";
+        + "where \"PART_ID\" in (" + partIdParamsPlaceholder + ") order by \"PART_NAME\" asc";
     long start = doTrace ? System.nanoTime() : 0;
     Query query = pm.newQuery("javax.jdo.query.SQL", queryText);
-    List<Object[]> sqlResult = executeWithArray(query, null, queryText);
+    List<Object[]> sqlResult = executeWithArray(query,
+        partIdParams,
+        queryText);
     long queryTime = doTrace ? System.nanoTime() : 0;
     Deadline.checkTimeout();
@@ -668,9 +671,10 @@ private boolean isViewTable(String catName, String dbName, String tblName) throw
     // Keep order by name, consistent with JDO.
     ArrayList<Partition> orderedResult = new ArrayList<Partition>(partIdList.size());
 
-    // Prepare StringBuilder-s for "in (...)" lists to use in one-to-many queries.
-    StringBuilder sdSb = new StringBuilder(sbCapacity), serdeSb = new StringBuilder(sbCapacity);
-    StringBuilder colsSb = new StringBuilder(7); // We expect that there's only one field schema.
+    List<Long> sdList = new ArrayList<>(sbCapacity);
+    List<Long> serdeList = new ArrayList<>(sbCapacity);
+    List<Long> colsList = new ArrayList<>(7);
+
     tblName = tblName.toLowerCase();
     dbName = dbName.toLowerCase();
     catName = normalizeSpace(catName).toLowerCase();
@@ -731,7 +735,7 @@ private boolean isViewTable(String catName, String dbName, String tblName) throw
       sd.setLocation((String)fields[9]);
       if (fields[10] != null) sd.setNumBuckets(extractSqlInt(fields[10]));
       sd.setOutputFormat((String)fields[11]);
-      sdSb.append(sdId).append(",");
+      sdList.add(sdId);
       part.setSd(sd);
 
       if (colId != null) {
@@ -740,7 +744,7 @@ private boolean isViewTable(String catName, String dbName, String tblName) throw
         if (cols == null) {
           cols = new ArrayList<FieldSchema>();
           colss.put(colId, cols);
-          colsSb.append(colId).append(",");
+          colsList.add(colId);
         }
         sd.setCols(cols);
       }
@@ -754,7 +758,7 @@ private boolean isViewTable(String catName, String dbName, String tblName) throw
         serde.setParameters(new HashMap<String, String>());
         serde.setName((String)fields[12]);
         serde.setSerializationLib((String)fields[13]);
-        serdeSb.append(serdeId).append(",");
+        serdeList.add(serdeId);
         sd.setSerdeInfo(serde);
 
         Deadline.checkTimeout();
@@ -764,9 +768,9 @@
 
     // Now get all the one-to-many things. Start with partitions.
     queryText = "select \"PART_ID\", \"PARAM_KEY\", \"PARAM_VALUE\" from " + PARTITION_PARAMS + ""
-        + " where \"PART_ID\" in (" + partIds + ") and \"PARAM_KEY\" is not null"
+        + " where \"PART_ID\" in (" + partIdParamsPlaceholder + ") and \"PARAM_KEY\" is not null"
        + " order by \"PART_ID\" asc";
-    loopJoinOrderedResult(partitions, queryText, 0, new ApplyFunc<Partition>() {
+    loopJoinOrderedResult(partitions, queryText, partIdParams, 0, new ApplyFunc<Partition>() {
       @Override
       public void apply(Partition t, Object[] fields) {
         t.putToParameters((String)fields[1], extractSqlClob(fields[2]));
@@ -777,29 +781,32 @@ public void apply(Partition t, Object[] fields) {
       }
 
     queryText = "select \"PART_ID\", \"PART_KEY_VAL\" from " + PARTITION_KEY_VALS + ""
-        + " where \"PART_ID\" in (" + partIds + ")"
+        + " where \"PART_ID\" in (" + partIdParamsPlaceholder + ")"
         + " order by \"PART_ID\" asc, \"INTEGER_IDX\" asc";
-    loopJoinOrderedResult(partitions, queryText, 0, new ApplyFunc<Partition>() {
+    loopJoinOrderedResult(partitions, queryText, partIdParams,0, new ApplyFunc<Partition>() {
       @Override
       public void apply(Partition t, Object[] fields) {
         t.addToValues((String)fields[1]);
       }});
 
     // Prepare IN (blah) lists for the following queries. Cut off the final ','s.
-    if (sdSb.length() == 0) {
-      assert serdeSb.length() == 0 && colsSb.length() == 0;
+    if (sdList.size() == 0) {
+      assert serdeList.size() == 0 && colsList.size() == 0;
       return orderedResult; // No SDs, probably a view.
     }
-    String sdIds = trimCommaList(sdSb);
-    String serdeIds = trimCommaList(serdeSb);
-    String colIds = trimCommaList(colsSb);
+    Object[] sdParams = sdList.toArray(new Object[sdList.size()]);
+    String sdParamsPlaceholder = makeParams(sdParams.length);
+    Object[] colIdsParams = colsList.toArray(new Object[colsList.size()]);
+    String colIdsParamsPlaceholder = makeParams(colIdsParams.length);
+    Object[] serdeIdsParams = serdeList.toArray(new Object[serdeList.size()]);
+    String serdeIdsParamsPlaceholder = makeParams(serdeIdsParams.length);
 
     // Get all the stuff for SD. Don't do empty-list check - we expect partitions do have SDs.
     queryText = "select \"SD_ID\", \"PARAM_KEY\", \"PARAM_VALUE\" from " + SD_PARAMS + ""
-        + " where \"SD_ID\" in (" + sdIds + ") and \"PARAM_KEY\" is not null"
+        + " where \"SD_ID\" in (" + sdParamsPlaceholder + ") and \"PARAM_KEY\" is not null"
         + " order by \"SD_ID\" asc";
-    loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
+    loopJoinOrderedResult(sds, queryText, sdParams, 0, new ApplyFunc<StorageDescriptor>() {
       @Override
       public void apply(StorageDescriptor t, Object[] fields) {
         t.putToParameters((String)fields[1], extractSqlClob(fields[2]));
@@ -811,9 +818,9 @@ public void apply(StorageDescriptor t, Object[] fields) {
 
     queryText = "select \"SD_ID\", \"COLUMN_NAME\", " + SORT_COLS + ".\"ORDER\""
         + " from " + SORT_COLS + ""
-        + " where \"SD_ID\" in (" + sdIds + ")"
+        + " where \"SD_ID\" in (" + sdParamsPlaceholder + ")"
         + " order by \"SD_ID\" asc, \"INTEGER_IDX\" asc";
-    loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
+    loopJoinOrderedResult(sds, queryText, sdParams,0, new ApplyFunc<StorageDescriptor>() {
       @Override
       public void apply(StorageDescriptor t, Object[] fields) {
         if (fields[2] == null) return;
@@ -821,9 +828,9 @@ public void apply(StorageDescriptor t, Object[] fields) {
 
     queryText = "select \"SD_ID\", \"BUCKET_COL_NAME\" from " + BUCKETING_COLS + ""
-        + " where \"SD_ID\" in (" + sdIds + ")"
+        + " where \"SD_ID\" in (" + sdParamsPlaceholder + ")"
         + " order by \"SD_ID\" asc, \"INTEGER_IDX\" asc";
-    loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
+    loopJoinOrderedResult(sds, queryText, sdParams, 0, new ApplyFunc<StorageDescriptor>() {
       @Override
       public void apply(StorageDescriptor t, Object[] fields) {
         t.addToBucketCols((String)fields[1]);
@@ -831,10 +838,10 @@ public void apply(StorageDescriptor t, Object[] fields) {
 
     // Skewed columns stuff.
     queryText = "select \"SD_ID\", \"SKEWED_COL_NAME\" from " + SKEWED_COL_NAMES + ""
-        + " where \"SD_ID\" in (" + sdIds + ")"
+        + " where \"SD_ID\" in (" + sdParamsPlaceholder + ")"
         + " order by \"SD_ID\" asc, \"INTEGER_IDX\" asc";
     boolean hasSkewedColumns =
-      loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
+      loopJoinOrderedResult(sds, queryText, sdParams, 0, new ApplyFunc<StorageDescriptor>() {
         @Override
         public void apply(StorageDescriptor t, Object[] fields) {
           if (!t.isSetSkewedInfo()) t.setSkewedInfo(new SkewedInfo());
@@ -851,12 +858,12 @@ public void apply(StorageDescriptor t, Object[] fields) {
         + "from " + SKEWED_VALUES + " "
         + " left outer join " + SKEWED_STRING_LIST_VALUES + " on " + SKEWED_VALUES + "."
         + "\"STRING_LIST_ID_EID\" = " + SKEWED_STRING_LIST_VALUES + ".\"STRING_LIST_ID\" "
-        + "where " + SKEWED_VALUES + ".\"SD_ID_OID\" in (" + sdIds + ") "
+        + "where " + SKEWED_VALUES + ".\"SD_ID_OID\" in (" + sdParamsPlaceholder + ") "
         + " and " + SKEWED_VALUES + ".\"STRING_LIST_ID_EID\" is not null "
         + " and " + SKEWED_VALUES + ".\"INTEGER_IDX\" >= 0 "
         + "order by " + SKEWED_VALUES + ".\"SD_ID_OID\" asc, " + SKEWED_VALUES + ".\"INTEGER_IDX\" asc,"
         + " " + SKEWED_STRING_LIST_VALUES + ".\"INTEGER_IDX\" asc";
-    loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
+    loopJoinOrderedResult(sds, queryText, sdParams,0, new ApplyFunc<StorageDescriptor>() {
       private Long currentListId;
       private List<String> currentList;
       @Override
@@ -888,13 +895,13 @@ public void apply(StorageDescriptor t, Object[] fields) throws MetaException {
         + "from " + SKEWED_COL_VALUE_LOC_MAP + ""
         + " left outer join " + SKEWED_STRING_LIST_VALUES + " on " + SKEWED_COL_VALUE_LOC_MAP + "."
         + "\"STRING_LIST_ID_KID\" = " + SKEWED_STRING_LIST_VALUES + ".\"STRING_LIST_ID\" "
-        + "where " + SKEWED_COL_VALUE_LOC_MAP + ".\"SD_ID\" in (" + sdIds + ")"
+        + "where " + SKEWED_COL_VALUE_LOC_MAP + ".\"SD_ID\" in (" + sdParamsPlaceholder + ")"
         + " and " + SKEWED_COL_VALUE_LOC_MAP + ".\"STRING_LIST_ID_KID\" is not null "
         + "order by " + SKEWED_COL_VALUE_LOC_MAP + ".\"SD_ID\" asc,"
         + " " + SKEWED_STRING_LIST_VALUES + ".\"STRING_LIST_ID\" asc,"
         + " " + SKEWED_STRING_LIST_VALUES + ".\"INTEGER_IDX\" asc";
-    loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
+    loopJoinOrderedResult(sds, queryText, sdParams,0, new ApplyFunc<StorageDescriptor>() {
       private Long currentListId;
       private List<String> currentList;
       @Override
@@ -928,9 +935,9 @@ public void apply(StorageDescriptor t, Object[] fields) throws MetaException {
     if (!colss.isEmpty()) {
       // We are skipping the CDS table here, as it seems to be totally useless.
       queryText = "select \"CD_ID\", \"COMMENT\", \"COLUMN_NAME\", \"TYPE_NAME\""
-          + " from " + COLUMNS_V2 + " where \"CD_ID\" in (" + colIds + ")"
+          + " from " + COLUMNS_V2 + " where \"CD_ID\" in (" + colIdsParamsPlaceholder + ")"
           + " order by \"CD_ID\" asc, \"INTEGER_IDX\" asc";
-      loopJoinOrderedResult(colss, queryText, 0, new ApplyFunc<List<FieldSchema>>() {
+      loopJoinOrderedResult(colss, queryText, colIdsParams, 0, new ApplyFunc<List<FieldSchema>>() {
        @Override
        public void apply(List<FieldSchema> t, Object[] fields) {
          t.add(new FieldSchema((String)fields[2], extractSqlClob(fields[3]), (String)fields[1]));
@@ -939,9 +946,12 @@ public void apply(List<FieldSchema> t, Object[] fields) {
 
     // Finally, get all the stuff for serdes - just the params.
     queryText = "select \"SERDE_ID\", \"PARAM_KEY\", \"PARAM_VALUE\" from " + SERDE_PARAMS + ""
-        + " where \"SERDE_ID\" in (" + serdeIds + ") and \"PARAM_KEY\" is not null"
+        + " where \"SERDE_ID\" in (" + serdeIdsParamsPlaceholder + ") and \"PARAM_KEY\" is "
+        + "not"
+        + " "
+        + "null"
         + " order by \"SERDE_ID\" asc";
-    loopJoinOrderedResult(serdes, queryText, 0, new ApplyFunc<SerDeInfo>() {
+    loopJoinOrderedResult(serdes, queryText, serdeIdsParams, 0, new ApplyFunc<SerDeInfo>() {
       @Override
       public void apply(SerDeInfo t, Object[] fields) {
         t.putToParameters((String)fields[1], extractSqlClob(fields[2]));
@@ -1087,25 +1097,6 @@ else if (value instanceof byte[]) {
     }
   }
 
-  /**
-   * Helper method for preparing for "SOMETHING_ID in (...)" to use in future queries.
-   * @param objectIds the objectId collection
-   * @return The concatenated list
-   * @throws MetaException If the list contains wrong data
-   */
-  private static String getIdListForIn(List<Long> objectIds) throws MetaException {
-    return objectIds.stream()
-        .map(i -> i.toString())
-        .collect(Collectors.joining(","));
-  }
-
-  private static String trimCommaList(StringBuilder sb) {
-    if (sb.length() > 0) {
-      sb.setLength(sb.length() - 1);
-    }
-    return sb.toString();
-  }
-
   private abstract class ApplyFunc<Target> {
     public abstract void apply(Target t, Object[] fields) throws MetaException;
   }
@@ -1116,21 +1107,23 @@ private static String trimCommaList(StringBuilder sb) {
    * separately for every object, which is suboptimal.
    * @param tree The object tree, by ID.
    * @param queryText The query text.
+   * @param parameters The query parameters.
    * @param keyIndex Index of the Long column corresponding to the map ID in query result rows.
    * @param func The function that is called on each (object,row) pair with the same id.
    * @return the count of results returned from the query.
    */
   private <T> int loopJoinOrderedResult(TreeMap<Long, T> tree,
-      String queryText, int keyIndex, ApplyFunc<T> func) throws MetaException {
+      String queryText, Object[] parameters, int keyIndex, ApplyFunc<T> func) throws MetaException {
     boolean doTrace = LOG.isDebugEnabled();
     long start = doTrace ? System.nanoTime() : 0;
     Query query = pm.newQuery("javax.jdo.query.SQL", queryText);
-    Object result = query.execute();
+    Object result = executeWithArray(query, parameters, queryText);
     long queryTime = doTrace ? System.nanoTime() : 0;
     if (result == null) {
       query.closeAll();
       return 0;
     }
+
     List<Object[]> list = ensureList(result);
     Iterator<Object[]> iter = list.iterator();
     Object[] fields = null;
@@ -2064,7 +2057,7 @@ public void prepareTxn() throws MetaException {
     if (dbType != DatabaseProduct.MYSQL) return;
     try {
       assert pm.currentTransaction().isActive(); // must be inside tx together with queries
-      executeNoResult("SET @@session.sql_mode=ANSI_QUOTES");
+      executeNoResult("SET @@session.sql_mode=ANSI_QUOTES", null);
     } catch (SQLException sqlEx) {
       throw new MetaException("Error setting ansi quotes: " + sqlEx.getMessage());
     }
@@ -2502,17 +2495,19 @@ private void dropPartitionsByPartitionIds(List<Long> partitionIdList) throws M
     if (partitionIdList.isEmpty()) {
       return;
     }
-    String partitionIds = getIdListForIn(partitionIdList);
+
+    Object[] partitionIdsParams = partitionIdList.toArray(new Object[partitionIdList.size()]);
+    String partitionIdsParamsPlaceholder = makeParams(partitionIdsParams.length);
 
     // Get the corresponding SD_ID-s, CD_ID-s, SERDE_ID-s
     queryText = "SELECT " + SDS + ".\"SD_ID\", " + SDS + ".\"CD_ID\", " + SDS + ".\"SERDE_ID\" "
         + "from " + SDS + " "
         + "INNER JOIN " + PARTITIONS + " ON " + PARTITIONS + ".\"SD_ID\" = " + SDS + ".\"SD_ID\" "
-        + "WHERE " + PARTITIONS + ".\"PART_ID\" in (" + partitionIds + ")";
+        + "WHERE " + PARTITIONS + ".\"PART_ID\" in (" + partitionIdsParamsPlaceholder + ")";
 
     Query query = pm.newQuery("javax.jdo.query.SQL", queryText);
-    List<Object[]> sqlResult = ensureList(executeWithArray(query, null, queryText));
+    List<Object[]> sqlResult = ensureList(executeWithArray(query, partitionIdsParams, queryText));
 
     List<Long> sdIdList = new ArrayList<>(partitionIdList.size());
     List<Long> columnDescriptorIdList = new ArrayList<>(1);
@@ -2532,35 +2527,41 @@ private void dropPartitionsByPartitionIds(List<Long> partitionIdList) throws M
     try {
       // Drop privileges
-      queryText = "delete from " + PART_PRIVS + " where \"PART_ID\" in (" + partitionIds + ")";
-      executeNoResult(queryText);
+      queryText =
+          "delete from " + PART_PRIVS + " where \"PART_ID\" in (" + partitionIdsParamsPlaceholder + ")";
+      executeNoResult(queryText, partitionIdsParams);
       Deadline.checkTimeout();
 
       // Drop column level privileges
-      queryText = "delete from " + PART_COL_PRIVS + " where \"PART_ID\" in (" + partitionIds + ")";
-      executeNoResult(queryText);
+      queryText =
+          "delete from " + PART_COL_PRIVS + " where \"PART_ID\" in (" + partitionIdsParamsPlaceholder
+              + ")";
+      executeNoResult(queryText, partitionIdsParams);
       Deadline.checkTimeout();
 
       // Drop partition statistics
-      queryText = "delete from " + PART_COL_STATS + " where \"PART_ID\" in (" + partitionIds + ")";
-      executeNoResult(queryText);
+      queryText =
+          "delete from " + PART_COL_STATS + " where \"PART_ID\" in (" + partitionIdsParamsPlaceholder
+              + ")";
+      executeNoResult(queryText, partitionIdsParams);
       Deadline.checkTimeout();
 
       // Drop the partition params
       queryText = "delete from " + PARTITION_PARAMS + " where \"PART_ID\" in ("
-          + partitionIds + ")";
-      executeNoResult(queryText);
+          + partitionIdsParamsPlaceholder + ")";
+      executeNoResult(queryText, partitionIdsParams);
       Deadline.checkTimeout();
 
       // Drop the partition key vals
       queryText = "delete from " + PARTITION_KEY_VALS + " where \"PART_ID\" in ("
-          + partitionIds + ")";
-      executeNoResult(queryText);
+          + partitionIdsParamsPlaceholder + ")";
+      executeNoResult(queryText, partitionIdsParams);
       Deadline.checkTimeout();
 
       // Drop the partitions
-      queryText = "delete from " + PARTITIONS + " where \"PART_ID\" in (" + partitionIds + ")";
-      executeNoResult(queryText);
+      queryText =
+          "delete from " + PARTITIONS + " where \"PART_ID\" in (" + partitionIdsParamsPlaceholder + ")";
+      executeNoResult(queryText, partitionIdsParams);
       Deadline.checkTimeout();
     } catch (SQLException sqlException) {
       LOG.warn("SQL error executing query while dropping partition", sqlException);
@@ -2586,16 +2587,18 @@ private void dropStorageDescriptors(List<Long> storageDescriptorIdList) throws
       return;
     }
     String queryText;
-    String sdIds = getIdListForIn(storageDescriptorIdList);
+    Object[] sdIdsParams =
+        storageDescriptorIdList.toArray(new Object[storageDescriptorIdList.size()]);
+    String sdIdsParamsPlaceholder = makeParams(sdIdsParams.length);
 
     // Get the corresponding SKEWED_STRING_LIST_ID data
     queryText = "select " + SKEWED_VALUES + ".\"STRING_LIST_ID_EID\" "
         + "from " + SKEWED_VALUES + " "
-        + "WHERE " + SKEWED_VALUES + ".\"SD_ID_OID\" in (" + sdIds + ")";
+        + "WHERE " + SKEWED_VALUES + ".\"SD_ID_OID\" in (" + sdIdsParamsPlaceholder + ")";
 
     Query query = pm.newQuery("javax.jdo.query.SQL", queryText);
-    List<Object[]> sqlResult = ensureList(executeWithArray(query, null, queryText));
+    List<Object[]> sqlResult = ensureList(executeWithArray(query, sdIdsParams, queryText));
 
     List<Long> skewedStringListIdList = new ArrayList<>(0);
@@ -2606,58 +2609,65 @@
     }
     query.closeAll();
 
-    String skewedStringListIds = getIdListForIn(skewedStringListIdList);
+    Object[] skewedStringParams =
+        skewedStringListIdList.toArray(new Object[skewedStringListIdList.size()]);
+    String skewedStringParamsPlaceholder = makeParams(skewedStringParams.length);
 
     try {
       // Drop the SD params
-      queryText = "delete from " + SD_PARAMS + " where \"SD_ID\" in (" + sdIds + ")";
-      executeNoResult(queryText);
+      queryText =
+          "delete from " + SD_PARAMS + " where \"SD_ID\" in (" + sdIdsParamsPlaceholder
+              + ")";
+      executeNoResult(queryText, sdIdsParams);
       Deadline.checkTimeout();
 
       // Drop the sort cols
-      queryText = "delete from " + SORT_COLS + " where \"SD_ID\" in (" + sdIds + ")";
-      executeNoResult(queryText);
+      queryText =
+          "delete from " + SORT_COLS + " where \"SD_ID\" in (" + sdIdsParamsPlaceholder
+              + ")";
+      executeNoResult(queryText, sdIdsParams);
       Deadline.checkTimeout();
 
       // Drop the bucketing cols
-      queryText = "delete from " + BUCKETING_COLS + " where \"SD_ID\" in (" + sdIds + ")";
-      executeNoResult(queryText);
+      queryText =
+          "delete from " + BUCKETING_COLS + " where \"SD_ID\" in (" + sdIdsParamsPlaceholder + ")";
+      executeNoResult(queryText, sdIdsParams);
       Deadline.checkTimeout();
 
       // Drop the skewed string lists
      if (skewedStringListIdList.size() > 0) {
         // Drop the skewed string value loc map
         queryText = "delete from " + SKEWED_COL_VALUE_LOC_MAP + " where \"SD_ID\" in ("
-            + sdIds + ")";
-        executeNoResult(queryText);
+            + sdIdsParamsPlaceholder + ")";
+        executeNoResult(queryText, sdIdsParams);
         Deadline.checkTimeout();
 
         // Drop the skewed values
-        queryText = "delete from " + SKEWED_VALUES + " where \"SD_ID_OID\" in (" + sdIds + ")";
-        executeNoResult(queryText);
+        queryText = "delete from " + SKEWED_VALUES + " where \"SD_ID_OID\" in (" + sdIdsParamsPlaceholder + ")";
+        executeNoResult(queryText, sdIdsParams);
         Deadline.checkTimeout();
 
         // Drop the skewed string list values
         queryText = "delete from " + SKEWED_STRING_LIST_VALUES + " where \"STRING_LIST_ID\" in ("
-            + skewedStringListIds + ")";
-        executeNoResult(queryText);
+            + skewedStringParamsPlaceholder + ")";
+        executeNoResult(queryText, skewedStringParams);
         Deadline.checkTimeout();
 
         // Drop the skewed string list
         queryText = "delete from " + SKEWED_STRING_LIST + " where \"STRING_LIST_ID\" in ("
-            + skewedStringListIds + ")";
-        executeNoResult(queryText);
+            + skewedStringParamsPlaceholder + ")";
+        executeNoResult(queryText, skewedStringParams);
         Deadline.checkTimeout();
       }
 
       // Drop the skewed cols
-      queryText = "delete from " + SKEWED_COL_NAMES + " where \"SD_ID\" in (" + sdIds + ")";
-      executeNoResult(queryText);
+      queryText = "delete from " + SKEWED_COL_NAMES + " where \"SD_ID\" in (" + sdIdsParamsPlaceholder + ")";
+      executeNoResult(queryText, sdIdsParams);
       Deadline.checkTimeout();
 
       // Drop the sds
-      queryText = "delete from " + SDS + " where \"SD_ID\" in (" + sdIds + ")";
-      executeNoResult(queryText);
+      queryText = "delete from " + SDS + " where \"SD_ID\" in (" + sdIdsParamsPlaceholder + ")";
+      executeNoResult(queryText, sdIdsParams);
     } catch (SQLException sqlException) {
       LOG.warn("SQL error executing query while dropping storage descriptor.", sqlException);
       throw new MetaException("Encountered error while dropping storage descriptor.");
@@ -2675,17 +2685,18 @@ private void dropSerdes(List<Long> serdeIdList) throws MetaException {
     if (serdeIdList.isEmpty()) {
       return;
     }
-    String serdeIds = getIdListForIn(serdeIdList);
+    Object[] serdeIdsParams = serdeIdList.toArray(new Object[serdeIdList.size()]);
+    String serdeIdsParamsPlaceholder = makeParams(serdeIdsParams.length);
 
     try {
       // Drop the serde params
-      queryText = "delete from " + SERDE_PARAMS + " where \"SERDE_ID\" in (" + serdeIds + ")";
-      executeNoResult(queryText);
+      queryText = "delete from " + SERDE_PARAMS + " where \"SERDE_ID\" in (" + serdeIdsParamsPlaceholder + ")";
+      executeNoResult(queryText, serdeIdsParams);
       Deadline.checkTimeout();
 
       // Drop the serdes
-      queryText = "delete from " + SERDES + " where \"SERDE_ID\" in (" + serdeIds + ")";
-      executeNoResult(queryText);
+      queryText = "delete from " + SERDES + " where \"SERDE_ID\" in (" + serdeIdsParamsPlaceholder + ")";
+      executeNoResult(queryText, serdeIdsParams);
     } catch (SQLException sqlException) {
       LOG.warn("SQL error executing query while dropping serde.", sqlException);
       throw new MetaException("Encountered error while dropping serde.");
@@ -2705,16 +2716,18 @@ private void dropDanglingColumnDescriptors(List<Long> columnDescriptorIdList)
       return;
     }
     String queryText;
-    String colIds = getIdListForIn(columnDescriptorIdList);
+    Object[] coldIdsParams =
+        columnDescriptorIdList.toArray(new Object[columnDescriptorIdList.size()]);
+    String colIdsParamsPlaceholder = makeParams(coldIdsParams.length);
 
     // Drop column descriptor, if no relation left
     queryText = "SELECT " + SDS + ".\"CD_ID\", count(1) "
         + "from " + SDS + " "
-        + "WHERE " + SDS + ".\"CD_ID\" in (" + colIds + ") "
+        + "WHERE " + SDS + ".\"CD_ID\" in (" + colIdsParamsPlaceholder + ") "
         + "GROUP BY " + SDS + ".\"CD_ID\"";
 
     Query query = pm.newQuery("javax.jdo.query.SQL", queryText);
-    List<Object[]> sqlResult = ensureList(executeWithArray(query, null, queryText));
+    List<Object[]> sqlResult = ensureList(executeWithArray(query, coldIdsParams, queryText));
 
     List<Long> danglingColumnDescriptorIdList = new ArrayList<>(columnDescriptorIdList.size());
     if (!sqlResult.isEmpty()) {
@@ -2728,16 +2741,19 @@ private void dropDanglingColumnDescriptors(List<Long> columnDescriptorIdList)
     if (!danglingColumnDescriptorIdList.isEmpty()) {
       try {
-        String danglingCDIds = getIdListForIn(danglingColumnDescriptorIdList);
+        Object[] danglingCDIdsParams =
+            danglingColumnDescriptorIdList.toArray(new Object[danglingColumnDescriptorIdList.size()]);
+        String danglingCDIdsParamsPlaceholder = makeParams(danglingCDIdsParams.length);
 
         // Drop the columns_v2
-        queryText = "delete from " + COLUMNS_V2 + " where \"CD_ID\" in (" + danglingCDIds + ")";
-        executeNoResult(queryText);
+        queryText = "delete from " + COLUMNS_V2 + " where \"CD_ID\" in (" + danglingCDIdsParamsPlaceholder + ")";
+        executeNoResult(queryText, danglingCDIdsParams);
         Deadline.checkTimeout();
 
         // Drop the cols
-        queryText = "delete from " + CDS + " where \"CD_ID\" in (" + danglingCDIds + ")";
-        executeNoResult(queryText);
+        queryText =
+            "delete from " + CDS + " where \"CD_ID\" in (" + danglingCDIdsParamsPlaceholder + ")";
+        executeNoResult(queryText, danglingCDIdsParams);
       } catch (SQLException sqlException) {
         LOG.warn("SQL error executing query while dropping dangling col descriptions", sqlException);
         throw new MetaException("Encountered error while dropping col descriptions");
diff --git storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampColumnVector.java storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampColumnVector.java
index dcbba7a9fc..3095114ae3 100644
--- storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampColumnVector.java
+++ storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampColumnVector.java
@@ -38,8 +38,6 @@
 
   private static int TEST_COUNT = 5000;
 
-  private static int fake = 0;
-
   @Test
   public void testSaveAndRetrieve() throws Exception {
diff --git streaming/pom.xml streaming/pom.xml
index 99697a8575..22fe61995d 100644
--- streaming/pom.xml
+++ streaming/pom.xml
@@ -91,6 +91,12 @@
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-standalone-metastore-server</artifactId>
+      <version>4.0.0-SNAPSHOT</version>
+      <scope>test</scope>
+    </dependency>
diff --git testutils/ptest2/conf/deployed/master-mr2.properties testutils/ptest2/conf/deployed/master-mr2.properties
index 333c3def03..23ad0f687f 100644
--- testutils/ptest2/conf/deployed/master-mr2.properties
+++ testutils/ptest2/conf/deployed/master-mr2.properties
@@ -182,7 +182,7 @@ qFileTest.erasurecodingCli.groups.normal = mainProperties.${erasurecoding.only.q
 qFileTest.miniDruid.driver = TestMiniDruidCliDriver
 qFileTest.miniDruid.directory = ql/src/test/queries/clientpositive
-qFileTest.miniDruid.batchSize = 50
+qFileTest.miniDruid.batchSize = 55
 qFileTest.miniDruid.queryFilesProperty = qfile
 qFileTest.miniDruid.include = normal
 qFileTest.miniDruid.groups.normal = mainProperties.${druid.query.files}
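[Editor's note] For reference, the new executeNoResult(queryText, params) introduced in the MetaStoreDirectSql hunks reduces to the following shape once the JDO plumbing is stripped away. A plain java.sql.Connection stands in here for the connection obtained via JDOConnection.getNativeConnection(), so this is a simplified sketch rather than the method as committed.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

final class ExecuteNoResultSketch {
  static void executeNoResult(Connection conn, String queryText, Object[] params)
      throws SQLException {
    // try-with-resources closes the statement even if execute() throws.
    try (PreparedStatement statement = conn.prepareStatement(queryText)) {
      if (params != null) { // callers such as prepareTxn() pass null when nothing needs binding
        for (int i = 0; i < params.length; i++) {
          statement.setObject(i + 1, params[i]);
        }
      }
      statement.execute();
    }
  }
}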